public void Detect_WhenExtensionIsRegistered_ReturnsLanguageFromExtension(string bufferContentType, AnalysisLanguage expectedLanguage)
{
    // Arrange: register exactly one content type whose extension list contains
    // the test extension, and make it report the buffer content type under test.
    const string registeredExtension = "XXX";
    var contentTypeMock = new Mock<IContentType>();
    contentTypeMock.Setup(x => x.IsOfType(bufferContentType)).Returns(true);

    contentTypeServiceMock.Setup(x => x.ContentTypes).Returns(new[] { contentTypeMock.Object });
    fileExtensionServiceMock.Setup(x => x.GetExtensionsForContentType(contentTypeMock.Object)).Returns(new[] { registeredExtension });

    // Act: detect by file name alone — no buffer content type is supplied,
    // so the language must come from the registered extension.
    var result = testSubject.Detect($"foo.{registeredExtension}", null);

    // Assert
    result.Should().HaveCount(1);
    result.First().Should().Be(expectedLanguage);
}
/// <summary>
/// Runs the Computer Vision analyze, area-of-interest, read and OCR operations in
/// parallel against either an image URL or an uploaded file, and combines the four
/// results into a single response. Service errors are converted into an error
/// response rather than rethrown.
/// </summary>
/// <param name="imageUrl">Image URL; when null or whitespace, <paramref name="file"/> is analyzed instead.</param>
/// <param name="file">Uploaded image; only consulted when <paramref name="imageUrl"/> is blank.</param>
/// <param name="analysisLanguage">Language used for the image-analysis operation.</param>
/// <param name="ocrLanguage">Language used for the OCR operation.</param>
/// <param name="readLanguage">Language used for the read operation.</param>
/// <returns>The combined analysis response, or an error response on a service failure.</returns>
public async Task<ComputerVisionAnalyzeResponse> AnalyzeAsync(string imageUrl, IFormFile file, AnalysisLanguage analysisLanguage, OcrLanguages ocrLanguage, ReadLanguage readLanguage)
{
    // Setup: a fresh client per call (preserves the original behavior).
    _computerVisionClient = new ComputerVisionClient(new ApiKeyServiceClientCredentials(_subscriptionKey))
    {
        Endpoint = _endpoint
    };

    // Computer vision by URL
    if (!string.IsNullOrWhiteSpace(imageUrl))
    {
        // Kick off all four operations in parallel.
        var imageAnalysis = ComputerVisionAnalyzeImageByUrlAsync(imageUrl, analysisLanguage);
        var areaOfInterest = ComputerVisionGetAreaOfInterestByUrlAsync(imageUrl);
        var read = ComputerVisionReadByUrlAsync(imageUrl, readLanguage);
        var recognizedPrintedText = ComputerVisionRecognizedPrintedTextByUrlAsync(imageUrl, ocrLanguage);

        try
        {
            await Task.WhenAll(imageAnalysis, areaOfInterest, read, recognizedPrintedText);

            return new ComputerVisionAnalyzeResponse
            {
                ImageInfo = new ImageInfo
                {
                    Src = imageUrl,
                    Description = imageAnalysis.Result.Description?.Captions?.FirstOrDefault()?.Text.ToSentence(),
                    Width = imageAnalysis.Result.Metadata.Width,
                    Height = imageAnalysis.Result.Metadata.Height
                },
                AnalyzeVisualFeatureTypes = AnalyzeVisualFeatureTypes,
                AnalyzeDetails = AnalyzeDetails,
                AnalysisResult = imageAnalysis.Result,
                AreaOfInterestResult = areaOfInterest.Result,
                OcrResult = recognizedPrintedText.Result,
                ReadResult = read.Result
            };
        }
        catch (ComputerVisionErrorException ex)
        {
            // Shared handler — the original duplicated this block verbatim in both branches.
            return CreateErrorResponse(ex);
        }
    }
    else
    {
        using (var analyzeStream = new MemoryStream())
        using (var areaOfInterestStream = new MemoryStream())
        using (var readStream = new MemoryStream())
        using (var ocrStream = new MemoryStream())
        using (var outputStream = new MemoryStream())
        {
            // Buffer the upload once.
            await file.CopyToAsync(analyzeStream);

            // Duplicate the buffer so each parallel operation owns its own stream
            // (each downstream call consumes the stream it is given).
            foreach (var duplicate in new Stream[] { areaOfInterestStream, readStream, ocrStream, outputStream })
            {
                analyzeStream.Seek(0, SeekOrigin.Begin);
                await analyzeStream.CopyToAsync(duplicate);
            }

            // Rewind every stream before consumption.
            analyzeStream.Seek(0, SeekOrigin.Begin);
            areaOfInterestStream.Seek(0, SeekOrigin.Begin);
            readStream.Seek(0, SeekOrigin.Begin);
            ocrStream.Seek(0, SeekOrigin.Begin);
            outputStream.Seek(0, SeekOrigin.Begin);

            // Kick off all four operations in parallel.
            var imageAnalysis = ComputerVisionAnalyzeImageByStreamAsync(analyzeStream, analysisLanguage);
            var areaOfInterest = ComputerVisionGetAreaOfInterestByStreamAsync(areaOfInterestStream);
            var read = ComputerVisionReadByStreamAsync(readStream, readLanguage);
            var recognizedPrintedText = ComputerVisionRecognizedPrintedTextByStreamAsync(ocrStream, ocrLanguage);

            try
            {
                await Task.WhenAll(imageAnalysis, areaOfInterest, read, recognizedPrintedText);

                // Embed the original upload as a base64 data URI for display.
                var fileBytes = outputStream.ToArray();
                var imageData = $"data:{file.ContentType};base64,{Convert.ToBase64String(fileBytes)}";

                return new ComputerVisionAnalyzeResponse
                {
                    ImageInfo = new ImageInfo
                    {
                        Src = imageData,
                        Description = imageAnalysis.Result.Description?.Captions?.FirstOrDefault()?.Text.ToSentence(),
                        Width = imageAnalysis.Result.Metadata.Width,
                        Height = imageAnalysis.Result.Metadata.Height
                    },
                    AnalyzeVisualFeatureTypes = AnalyzeVisualFeatureTypes,
                    AnalyzeDetails = AnalyzeDetails,
                    AnalysisResult = imageAnalysis.Result,
                    AreaOfInterestResult = areaOfInterest.Result,
                    OcrResult = recognizedPrintedText.Result,
                    ReadResult = read.Result
                };
            }
            catch (ComputerVisionErrorException ex)
            {
                return CreateErrorResponse(ex);
            }
        }
    }
}

/// <summary>
/// Converts a Computer Vision API exception into an error response. Bad requests are
/// reported through the ApiRequestError fields; everything else through OtherError.
/// Shared by both the URL and the file branch of <see cref="AnalyzeAsync"/>.
/// </summary>
private static ComputerVisionAnalyzeResponse CreateErrorResponse(ComputerVisionErrorException ex)
{
    // Pretty-print the raw error payload for display.
    var parsedJson = JToken.Parse(ex.Response.Content);

    if (ex.Response.StatusCode == System.Net.HttpStatusCode.BadRequest)
    {
        return new ComputerVisionAnalyzeResponse
        {
            ApiRequestErrorMessage = "Bad request thrown by the underlying API from Microsoft:",
            ApiRequestErrorContent = parsedJson.ToString(Formatting.Indented)
        };
    }

    return new ComputerVisionAnalyzeResponse
    {
        OtherErrorMessage = "Error thrown by the underlying API from Microsoft:",
        OtherErrorContent = parsedJson.ToString(Formatting.Indented)
    };
}
public void Detect_WhenExtensionNotRegistered_ReturnsLanguageFromBufferContentType(string bufferContentType, AnalysisLanguage expectedLanguage)
{
    // Arrange: no extension is registered anywhere, so the detector must fall
    // back to the content type supplied with the buffer.
    var bufferContentTypeMock = new Mock<IContentType>();
    bufferContentTypeMock.Setup(x => x.IsOfType(bufferContentType)).Returns(true);

    // Act: the file name carries no registered extension.
    var result = testSubject.Detect("foo", bufferContentTypeMock.Object);

    // Assert
    result.Should().HaveCount(1);
    result.First().Should().Be(expectedLanguage);
}
/// <summary>
/// Sends the given image stream to the Computer Vision analyze-image endpoint.
/// The API rejects some visual features for non-English analysis, so a reduced
/// feature set is requested for every language other than English.
/// </summary>
/// <param name="imageStream">Stream positioned at the start of the image data.</param>
/// <param name="analysisLanguage">Language the analysis should be performed in.</param>
/// <returns>The image-analysis result from the service.</returns>
private async Task<ImageAnalysis> ComputerVisionAnalyzeImageByStreamAsync(Stream imageStream, AnalysisLanguage analysisLanguage)
{
    // API does not support some visual features if analysis is not in English.
    var requestedFeatures = AnalysisLanguage.en.Equals(analysisLanguage)
        ? AnalyzeVisualFeatureTypes
        : AnalyzeVisualFeatureLimitedTypes;

    return await _computerVisionClient.AnalyzeImageInStreamAsync(
        image: imageStream,
        visualFeatures: requestedFeatures,
        details: AnalyzeDetails,
        language: analysisLanguage.ToString());
}