/// <summary>
/// Routes an image-analysis request to the matching Azure analysis routine based on the request type.
/// </summary>
/// <param name="videoCamera">Camera the image came from; supplies the person-group id for face requests.</param>
/// <param name="imageProcess">Carries the raw image bytes to analyze.</param>
/// <param name="analyzeRequest">Connection data, language id, and the requested analysis type.</param>
/// <returns>The raw service response, string.Empty for scene-change requests, or null for unknown types.</returns>
public async Task<string> AnalyzeImageProcessAsync(VideoCamera videoCamera, ImageProcess imageProcess, AnalyzeRequest analyzeRequest)
{
    var connectionApiData = new ConnectionApiData
    {
        Location = analyzeRequest.Location,
        SubscriptionKey = analyzeRequest.SubscriptionKey
    };

    var language = (Languages)analyzeRequest.IdLanguages;
    var requestType = (RequestType)analyzeRequest.TypeCode;

    // Scene change needs no remote analysis.
    if (requestType == RequestType.SceneChange)
    {
        return string.Empty;
    }

    if (requestType == RequestType.Face)
    {
        return await AnalyzeFace(imageProcess.ImageFile, videoCamera.IdPersonGroupsAPI, connectionApiData);
    }

    if (requestType == RequestType.Carplate)
    {
        return await AnalyzeCarPlate(language.ToString(), imageProcess.ImageFile, connectionApiData);
    }

    if (requestType == RequestType.ImageDescription)
    {
        return await AnalyzeImageDescription(language.ToString(), imageProcess.ImageFile, connectionApiData);
    }

    // Unrecognized request type: nothing to analyze.
    return null;
}
/// <summary>Starts training of the given person group through the Azure Face SDK.</summary>
/// <param name="personGroupId">Identifier of the person group to train.</param>
/// <param name="connectionApiData">Azure endpoint location and subscription key.</param>
public async Task TrainPersonGroupAsync(string personGroupId, ConnectionApiData connectionApiData)
{
    // NOTE(review): a new SDK client is built per call — presumably acceptable at current volume; verify.
    using (var client = new FaceServiceClient(connectionApiData.SubscriptionKey, connectionApiData.Location))
    {
        await client.TrainPersonGroupAsync(personGroupId);
    }
}
/// <summary>Creates a person group with the given id and display name through the Azure Face SDK.</summary>
/// <param name="id">Identifier for the new person group.</param>
/// <param name="name">Display name for the new person group.</param>
/// <param name="connectionApiData">Azure endpoint location and subscription key.</param>
public async Task CreatePersonGroupAsync(string id, string name, ConnectionApiData connectionApiData)
{
    using (var client = new FaceServiceClient(connectionApiData.SubscriptionKey, connectionApiData.Location))
    {
        await client.CreatePersonGroupAsync(id, name);
    }
}
/// <summary>Builds an <see cref="HttpClient"/> preconfigured with the Azure endpoint and subscription-key header.</summary>
/// <param name="connectionApiData">Azure endpoint location and subscription key.</param>
/// <returns>A configured client; the caller owns it and is responsible for disposing it.</returns>
/// <remarks>NOTE(review): a client is created per call; callers dispose it, but a shared instance may be preferable — verify call volume.</remarks>
protected HttpClient GetAzureApiClient(ConnectionApiData connectionApiData)
{
    var client = new HttpClient
    {
        BaseAddress = new Uri(connectionApiData.Location)
    };
    client.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", connectionApiData.SubscriptionKey);
    return client;
}
/// <summary>Detects faces in an image via the Azure Face SDK and returns their face ids.</summary>
/// <param name="image">Raw image bytes.</param>
/// <param name="connectionApiData">Azure endpoint location and subscription key.</param>
/// <returns>The detected face ids, or null when no face is found (an error is logged in that case).</returns>
private async Task<List<Guid>> GetFaceIds(byte[] image, ConnectionApiData connectionApiData)
{
    using (var imageStream = new MemoryStream(image))
    using (var faceClient = new FaceServiceClient(connectionApiData.SubscriptionKey, connectionApiData.Location))
    {
        Face[] faces = await faceClient.DetectAsync(imageStream);

        // Guard against a null result as well as an empty one; the original only checked
        // Count() > 0 (LINQ Count() on an array — Length is the idiomatic, O(1) form).
        if (faces == null || faces.Length == 0)
        {
            _logger.LogError("AzureFaceAdapter.GetFaceIds::Cannot get faces from image");
            return null;
        }

        return faces.Select(f => f.FaceId).ToList();
    }
}
/// <summary>Posts an image to the Azure Recognize Text endpoint and returns the raw response body.</summary>
/// <param name="mode">Recognition mode substituted into the endpoint URI template.</param>
/// <param name="image">Raw image bytes.</param>
/// <param name="connectionApiData">Azure endpoint location and subscription key.</param>
/// <returns>The response body on success; null on failure (the failure is logged).</returns>
public async Task<string> RecognizeTextAsync(string mode, byte[] image, ConnectionApiData connectionApiData)
{
    var requestUri = string.Format(_azureApiData.RecognizeTextUri, mode);

    using (var apiClient = GetAzureApiClient(connectionApiData))
    using (var content = new ByteArrayContent(image))
    {
        content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");

        using (var response = await apiClient.PostAsync(requestUri, content))
        {
            if (response.IsSuccessStatusCode)
            {
                return await response.Content.ReadAsStringAsync();
            }

            _logger.LogError(string.Concat("AzureComputerVisionAdapter.RecognizeTextAsync::Cannot Recognize Text image", response.ToString()));
            return null;
        }
    }
}
/// <summary>Mock implementation: returns a canned Describe Image JSON response instead of calling Azure.</summary>
/// <param name="language">Ignored by this mock.</param>
/// <param name="image">Ignored by this mock.</param>
/// <param name="connectionApiData">Ignored by this mock.</param>
/// <returns>A fixed sample response matching the Computer Vision "describe" payload shape.</returns>
public async Task<string> DescribeImageAsync(string language, byte[] image, ConnectionApiData connectionApiData)
{
    const string cannedResponse = @" { ""description"": { ""tags"": [ ""person"", ""man"", ""outdoor"", ""window"", ""glasses"" ], ""captions"": [ { ""text"": ""Satya Nadella sitting on a bench"", ""confidence"": 0.48293603002174407 }, { ""text"": ""Satya Nadella is sitting on a bench"", ""confidence"": 0.40037006815422832 }, { ""text"": ""Satya Nadella sitting in front of a building"", ""confidence"": 0.38035155997373377 } ] }, ""requestId"": ""ed2de1c6-fb55-4686-b0da-4da6e05d283f"", ""metadata"": { ""width"": 1500, ""height"": 1000, ""format"": ""Jpeg"" } } ";
    return await Task.FromResult(cannedResponse);
}
/// <summary>Runs OCR over a suspected car-plate image via the computer-vision adapter.</summary>
/// <param name="language">Language code passed through to the OCR call.</param>
/// <param name="image">Raw image bytes.</param>
/// <param name="connectionApiData">Azure endpoint location and subscription key.</param>
/// <returns>The raw OCR response from the adapter.</returns>
private async Task<string> AnalyzeCarPlate(string language, byte[] image, ConnectionApiData connectionApiData)
{
    var ocrResult = await _computerVisionAdapter.OCRAsync(language, image, connectionApiData);
    return ocrResult;
}
/// <summary>Creates a person inside a person group through the Azure Face SDK.</summary>
/// <param name="personGroupId">Person group to add the person to.</param>
/// <param name="name">Display name for the new person.</param>
/// <param name="connectionApiData">Azure endpoint location and subscription key.</param>
/// <returns>The new person's id as a string.</returns>
public async Task<string> CreatePersonGroupPersonAsync(string personGroupId, string name, ConnectionApiData connectionApiData)
{
    using (var client = new FaceServiceClient(connectionApiData.SubscriptionKey, connectionApiData.Location))
    {
        var createResult = await client.CreatePersonAsync(personGroupId, name);
        return createResult.PersonId.ToString();
    }
}
/// <summary>Mock implementation: returns a fresh GUID as the persisted-face id instead of calling Azure.</summary>
/// <returns>A newly generated GUID string (non-deterministic, as in the original mock).</returns>
public async Task<string> AddFacePersonGroupPersonAsync(string personGroupId, string personId, byte[] image, ConnectionApiData connectionApiData)
{
    var fakePersistedFaceId = Guid.NewGuid().ToString();
    return await Task.FromResult(fakePersistedFaceId);
}
/// <summary>Mock implementation: training is a no-op.</summary>
public async Task TrainPersonGroupAsync(string personGroupId, ConnectionApiData connectionApiData)
{
    // Nothing to do; await a completed task to keep the async signature warning-free.
    await Task.CompletedTask;
}
/// <summary>Mock implementation: returns a canned Face Identify response instead of calling Azure.</summary>
/// <returns>A fixed sample identification payload.</returns>
/// <remarks>NOTE(review): the canned payload wraps the array in an extra object ({ [ ... ] }), which is not valid JSON — confirm downstream parsing tolerates it.</remarks>
public async Task<string> IdentifyFaceAsync(byte[] image, string personGroupId, ConnectionApiData connectionApiData)
{
    const string cannedResponse = @" { [ { ""faceId"": ""c5c24a82-6845-4031-9d5d-978df9175426"", ""candidates"": [ { ""personId"": ""25985303-c537-4467-b41d-bdb45cd95ca1"", ""confidence"": 0.92 } ] }, { ""faceId"": ""65d083d4-9447-47d1-af30-b626144bf0fb"", ""candidates"": [ { ""personId"": ""2ae4935b-9659-44c3-977f-61fac20d0538"", ""confidence"": 0.89 } ] } ] } ";
    return await Task.FromResult(cannedResponse);
}
/// <summary>Mock implementation: returns a canned "succeeded" training-status payload instead of calling Azure.</summary>
public async Task<string> GetTrainingStatusPersonGroupAsync(string personGroupId, ConnectionApiData connectionApiData)
{
    const string cannedResponse = @" { ""status"": ""succeeded"", ""createdDateTime"": ""12/21/2017 12:57:27"", ""lastActionDateTime"": ""12/21/2017 12:57:30"", ""message"": null } ";
    return await Task.FromResult(cannedResponse);
}
/// <summary>Mock implementation: returns a fresh GUID as the person id instead of calling Azure.</summary>
/// <returns>A newly generated GUID string (non-deterministic, as in the original mock).</returns>
public async Task<string> CreatePersonGroupPersonAsync(string personGroupId, string name, ConnectionApiData connectionApiData)
{
    var fakePersonId = Guid.NewGuid().ToString();
    return await Task.FromResult(fakePersonId);
}
/// <summary>Mock implementation: creating a person group is a no-op.</summary>
public async Task CreatePersonGroupAsync(string id, string name, ConnectionApiData connectionApiData)
{
    // Nothing to do; await a completed task to keep the async signature warning-free.
    await Task.CompletedTask;
}
/// <summary>Adds a face image to an existing person in a person group through the Azure Face SDK.</summary>
/// <param name="personGroupId">Person group containing the person.</param>
/// <param name="personId">GUID string of the target person.</param>
/// <param name="image">Raw face-image bytes.</param>
/// <param name="connectionApiData">Azure endpoint location and subscription key.</param>
/// <returns>The persisted face id as a string.</returns>
public async Task<string> AddFacePersonGroupPersonAsync(string personGroupId, string personId, byte[] image, ConnectionApiData connectionApiData)
{
    using (var client = new FaceServiceClient(connectionApiData.SubscriptionKey, connectionApiData.Location))
    using (var imageStream = new MemoryStream(image))
    {
        var addResult = await client.AddPersonFaceAsync(personGroupId, new Guid(personId), imageStream);
        return addResult.PersistedFaceId.ToString();
    }
}
/// <summary>Gets a natural-language description of an image via the computer-vision adapter.</summary>
/// <param name="language">Language code passed through to the describe call.</param>
/// <param name="image">Raw image bytes.</param>
/// <param name="connectionApiData">Azure endpoint location and subscription key.</param>
/// <returns>The raw describe-image response from the adapter.</returns>
private async Task<string> AnalyzeImageDescription(string language, byte[] image, ConnectionApiData connectionApiData)
{
    var description = await _computerVisionAdapter.DescribeImageAsync(language, image, connectionApiData);
    return description;
}
/// <summary>
/// Identifies people in an image against a trained person group via the Face Identify REST endpoint.
/// </summary>
/// <param name="image">Raw image bytes.</param>
/// <param name="personGroupId">Person group to identify against.</param>
/// <param name="connectionApiData">Azure endpoint location and subscription key.</param>
/// <returns>The raw JSON identify response, or null when no face is detected or the call fails (failures are logged).</returns>
public async Task<string> IdentifyFaceAsync(byte[] image, string personGroupId, ConnectionApiData connectionApiData)
{
    var faceIds = await GetFaceIds(image, connectionApiData);
    if (faceIds == null)
    {
        // GetFaceIds already logged the detection failure.
        return null;
    }

    var uri = _azureApiData.IdentifyFaceUri;
    var payload = new { faceIds = faceIds, personGroupId = personGroupId };

    using (var azureClientApi = GetAzureApiClient(connectionApiData))
    using (var postContent = new StringContent(JsonConvert.SerializeObject(payload), Encoding.UTF8, "application/json"))
    using (var response = await azureClientApi.PostAsync(uri, postContent))
    {
        if (!response.IsSuccessStatusCode)
        {
            // Bug fix: the previous message was copy-pasted from CreatePersonGroupAsync and
            // misreported the failing operation.
            _logger.LogError(string.Concat("AzureFaceAdapter.IdentifyFaceAsync::Cannot identify face with azureApi: ", response.ToString()));
            return null;
        }

        return await response.Content.ReadAsStringAsync();
    }
}
/// <summary>Identifies faces in an image against a person group via the face adapter.</summary>
/// <param name="image">Raw image bytes.</param>
/// <param name="personGroupId">Person group to identify against.</param>
/// <param name="connectionApiData">Azure endpoint location and subscription key.</param>
/// <returns>The raw identification response from the adapter.</returns>
private async Task<string> AnalyzeFace(byte[] image, string personGroupId, ConnectionApiData connectionApiData)
{
    var identification = await _faceAdapter.IdentifyFaceAsync(image, personGroupId, connectionApiData);
    return identification;
}
/// <summary>Mock implementation: returns a canned OCR JSON response instead of calling Azure.</summary>
/// <param name="language">Ignored by this mock.</param>
/// <param name="image">Ignored by this mock.</param>
/// <param name="connectionApiData">Ignored by this mock.</param>
/// <returns>A fixed sample response matching the Computer Vision OCR payload shape.</returns>
public async Task<string> OCRAsync(string language, byte[] image, ConnectionApiData connectionApiData)
{
    const string cannedResponse = @" { ""language"": ""en"", ""textAngle"": -2.0000000000000338, ""orientation"": ""Up"", ""regions"": [ { ""boundingBox"": ""462,379,497,258"", ""lines"": [ { ""boundingBox"": ""462,379,497,74"", ""words"": [ { ""boundingBox"": ""462,379,41,73"", ""text"": ""A"" }, { ""boundingBox"": ""523,379,153,73"", ""text"": ""GOAL"" }, { ""boundingBox"": ""694,379,265,74"", ""text"": ""WITHOUT"" } ] }, { ""boundingBox"": ""565,471,289,74"", ""words"": [ { ""boundingBox"": ""565,471,41,73"", ""text"": ""A"" }, { ""boundingBox"": ""626,471,150,73"", ""text"": ""PLAN"" }, { ""boundingBox"": ""801,472,53,73"", ""text"": ""IS"" } ] }, { ""boundingBox"": ""519,563,375,74"", ""words"": [ { ""boundingBox"": ""519,563,149,74"", ""text"": ""JUST"" }, { ""boundingBox"": ""683,564,41,72"", ""text"": ""A"" }, { ""boundingBox"": ""741,564,153,73"", ""text"": ""WISH"" } ] } ] } ] } ";
    return await Task.FromResult(cannedResponse);
}
/// <summary>Mock implementation: returns an empty response instead of calling Azure.</summary>
public async Task<string> RecognizeTextAsync(string mode, byte[] image, ConnectionApiData connectionApiData)
{
    const string cannedResponse = @"";
    return await Task.FromResult(cannedResponse);
}
/// <summary>Gets the training status of a person group through the Azure Face SDK.</summary>
/// <param name="personGroupId">Person group to query.</param>
/// <param name="connectionApiData">Azure endpoint location and subscription key.</param>
/// <returns>The training status name as a string.</returns>
public async Task<string> GetTrainingStatusPersonGroupAsync(string personGroupId, ConnectionApiData connectionApiData)
{
    using (var client = new FaceServiceClient(connectionApiData.SubscriptionKey, connectionApiData.Location))
    {
        var trainingStatus = await client.GetPersonGroupTrainingStatusAsync(personGroupId);
        return trainingStatus.Status.ToString();
    }
}