/// <summary>
/// Runs OCR over every pix produced from the image file and collects the per-pix
/// results into a single RecognitionResults container.
/// NOTE(review): despite the name and Task return type, all work happens
/// synchronously on the calling thread (acknowledged by the ToDo below).
/// </summary>
public Task <IRecognitionResults> ExecuteAsync()
{
    // ToDo: preserve image order
    // ToDo: make async
    var pixes = _pixFactory.Create(_imageFile.Path);
    var results = new RecognitionResults();
    // 10 is a hard-coded tuning value capping the worker count.
    var options = new ParallelOptions { MaxDegreeOfParallelism = 10 };
    Parallel.ForEach(pixes, options, pix =>
    {
        IPix preprocessedPix;
        // Dispose the raw pix as soon as the preprocessed copy exists.
        using (pix) preprocessedPix = Preprocess(pix);
        using (preprocessedPix)
        {
            // One engine per pix; presumably engines are not safe to share across threads — TODO confirm.
            using var engine = _engineFactory.Create();
            var result = engine.Process(preprocessedPix);
            var recognitionResult = new RecognitionResult(result);
            // Thread-safe insert; arrival order (and thus image order) is not preserved.
            results.BlockingAdd(recognitionResult);
        }
    });
    return(Task.FromResult <IRecognitionResults>(results));
}
/// <summary>
/// Maps a persistence-layer DbResult row onto a domain RecognitionResults entity.
/// Coordinates are copied only for valid rows; invalid rows carry just the photo id.
/// </summary>
internal static RecognitionResults ToDomainEntity(this DbResult entity)
{
    // Invalid rows have no coordinate data worth mapping.
    if (entity.IsValid != true)
    {
        return new RecognitionResults
        {
            IsValid = false,
            PhotoID = entity.PhotoId
        };
    }

    return new RecognitionResults
    {
        IsValid = true,
        Coords = new Points
        {
            TopLeft = new Point { X = entity.X1.Value, Y = entity.Y1.Value },
            BottomRight = new Point { X = entity.X2.Value, Y = entity.Y2.Value }
        },
        PhotoID = entity.PhotoId
    };
}
/// <summary>
/// Loads the recognition results stored for the given photo and maps them to the domain entity.
/// </summary>
/// <param name="photoId">Identifier of the photo; must be positive.</param>
/// <returns>The recognition results for the photo.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="photoId"/> is zero or negative.</exception>
/// <exception cref="Exception">Thrown when no result row exists for the photo.</exception>
public RecognitionResults GetRecognitionResultsByPhotoId(int photoId)
{
    if (photoId <= 0)
    {
        // Fixed typo in the original log message ("Ivalid").
        logger.LogWarning("Invalid Photo ID");
        throw new ArgumentException("Photo ID cannot be equal zero or less");
    }
    using (var context = new PsqlContext())
    {
        try
        {
            // Predicate overload of FirstOrDefault replaces the separate Where(...) call.
            var recResult = context.Results.FirstOrDefault(p => p.PhotoId == photoId);
            if (recResult == null)
            {
                logger.LogWarning("RecognitionResults is not found");
                throw new Exception("RecognitionResults with that Photo ID is not found");
            }
            RecognitionResults result = recResult.ToDomainEntity();
            logger.LogDebug("RecognitionResults is found");
            return result;
        }
        catch (Exception e)
        {
            // Log the actual failure instead of a bare "Error"; rethrow with `throw;`
            // so the original stack trace is preserved.
            logger.LogWarning("Error: {0}", e.Message);
            throw;
        }
    }
}
/// <summary>
/// Maps a domain RecognitionResults entity onto a persistence-layer DbResult row.
/// Invalid results are stored with null coordinates.
/// </summary>
internal static DbResult ToDbEntity(this RecognitionResults entity)
{
    // Invalid results persist no coordinates.
    if (entity.IsValid != true)
    {
        return new DbResult
        {
            IsValid = false,
            X1 = null,
            X2 = null,
            Y1 = null,
            Y2 = null,
            PhotoId = entity.PhotoID
        };
    }

    return new DbResult
    {
        IsValid = true,
        X1 = entity.Coords.TopLeft.X,
        X2 = entity.Coords.BottomRight.X,
        Y1 = entity.Coords.TopLeft.Y,
        Y2 = entity.Coords.BottomRight.Y,
        PhotoId = entity.PhotoID
    };
}
/// <summary>
/// Stub implementation: always resolves to a fixed, valid result with coordinates
/// (0,0)-(10,10) and photo id 2, ignoring both parameters.
/// </summary>
/// <param name="sessionUID">Ignored by this stub.</param>
/// <param name="photoId">Ignored by this stub.</param>
public Task <RecognitionResults> TryGetResults(string sessionUID, int photoId)
{
    Points points = new Points(new Point(0, 0), new Point(10, 10));
    RecognitionResults results = new RecognitionResults(true, points, 2);
    // Task.FromResult returns a completed task directly; the original used Task.Run,
    // which needlessly schedules a thread-pool work item to hand back a ready value.
    return Task.FromResult(results);
}
/// <summary>
/// Creates a Moq stub repository that returns the given results for any photo id,
/// stores it in the recognitionResultsRepository field, and returns it.
/// </summary>
private IRecognitionResultsRepository GetRecognitionResults(MockRepository repository, RecognitionResults testRecognitionResults)
{
    var stub = repository.Of <IRecognitionResultsRepository>()
               .Where(rr => rr.GetRecognitionResultsByPhotoId(It.IsAny <int>()) == testRecognitionResults)
               .First();
    recognitionResultsRepository = stub;
    return recognitionResultsRepository;
}
/// <summary>
/// Builds Moq stubs for the session, photo, and recognition-results repositories,
/// each returning a canned entity for any id, and initializes the shared
/// image/testResults fixture fields. (The unused local 'userId' was removed.)
/// </summary>
private void InstantiateRepositories(ApplicationUser user,
                                     out ISessionRepository sessionRepository,
                                     out IPhotoRepository photoRepository,
                                     out IRecognitionResultsRepository recognitionResultsRepository)
{
    var repository = new MockRepository(MockBehavior.Default);

    // Shared fixture state consumed by the tests.
    image = new byte[] { 0x20, 0x20, 0x20 };
    testResults = new RecognitionResults(true, new Points(new Point(0, 1), new Point(2, 3)), photoId);

    var timeStamp = new DateTime();
    Session testSession = new Session(user.Id, timeStamp, timeStamp, SessionStatus.ACTIVE);
    testSession.SessionID = 1;
    Photo testPhoto = new Photo(testSession.SessionID, image, timeStamp);
    testPhoto.PhotoID = 2;

    sessionRepository = repository.Of <ISessionRepository>()
                        .Where(sr => sr.GetLastSessionForUser(It.IsAny <int>()) == testSession)
                        .First();
    photoRepository = repository.Of <IPhotoRepository>()
                      .Where(pr => pr.GetLastPhotoInSession(It.IsAny <int>()) == testPhoto)
                      .First();
    recognitionResultsRepository = repository.Of <IRecognitionResultsRepository>()
                                   .Where(rr => rr.GetRecognitionResultsByPhotoId(It.IsAny <int>()) == testResults)
                                   .First();
}
/// <summary>
/// Persists changes to an existing recognition result row.
/// Never throws: returns false on null input or any persistence failure.
/// </summary>
/// <param name="entity">The domain entity whose DB row should be updated.</param>
/// <returns>True when the update was saved; false otherwise.</returns>
public bool Update(RecognitionResults entity)
{
    using (logger.BeginScope(nameof(this.Update)))
    {
        if (entity == null)
        {
            logger.LogWarning("ArgumentNullException while updating Recognition Result in DB");
            return false;
        }
        using (var context = new PsqlContext())
        {
            try
            {
                DbResult dbResult = entity.ToDbEntity();
                context.Results.Update(dbResult);
                context.SaveChanges();
                logger.LogDebug("RecognitionResult updated successfully");
                return true;
            }
            catch (Exception e)
            {
                // Fixed copy/paste in the original message, which said "pulse".
                logger.LogWarning(e.ToString() + " while updating Recognition Result in DB");
                return false;
            }
        }
    }
}
/// <summary>
/// Extracts text from the PDF: when the document already has a text layer it is
/// returned directly; otherwise every embedded image is OCR'd in parallel.
/// NOTE(review): the OCR path runs synchronously despite the Task return type.
/// </summary>
public Task <IRecognitionResults> ExecuteAsync()
{
    using var document = _pdfDocumentFactory.Open(_pdfFile.Path);
    // Fast path: a searchable PDF needs no OCR at all.
    if (document.ContainsText())
    {
        return(Task.FromResult(document.ExtractText()));
    }
    var storedImages = ExtractImages(document);
    var pixes = storedImages.SelectMany(x => _pixFactory.Create(x.Path));
    var results = new RecognitionResults();
    // 10 is a hard-coded tuning value capping the worker count.
    var options = new ParallelOptions { MaxDegreeOfParallelism = 10 };
    Parallel.ForEach(pixes, options, pix =>
    {
        IPix preprocessedPix;
        // Dispose the raw pix as soon as the preprocessed copy exists.
        using (pix) preprocessedPix = Preprocess(pix);
        using (preprocessedPix)
        {
            // One engine per pix; presumably engines are not safe to share across threads — TODO confirm.
            using var engine = _engineFactory.Create();
            var result = engine.Process(preprocessedPix);
            var recognitionResult = new RecognitionResult(result);
            // Thread-safe insert; arrival order (and thus page/image order) is not preserved.
            results.BlockingAdd(recognitionResult);
        }
    });
    return(Task.FromResult((IRecognitionResults)results));
}
/// <summary>
/// Builds recognition results from the embedded text of every page in the document:
/// one RecognitionResult per page, containing the page's text fragments concatenated
/// with no separator.
/// </summary>
public static IRecognitionResults ExtractText(this PdfDocument document)
{
    var recognition = new RecognitionResults();
    foreach (var currentPage in document.Pages)
    {
        // string.Concat(fragments) == string.Join(string.Empty, fragments)
        recognition.Add(new RecognitionResult(string.Concat(currentPage.ExtractText())));
    }
    return recognition;
}
/// <summary>
/// Verifies that GetRecognitionResults returns the stored results when present and a
/// null payload when recognition has not finished for the photo.
/// Changed from async void to async Task so the test framework can await completion
/// and observe assertion failures/exceptions (async void swallows them).
/// </summary>
/// <param name="recognitionResultsArePresent">Whether the stub repository holds results.</param>
public async Task GetRecognitionResultsTest(bool recognitionResultsArePresent)
{
    // Distinct user per case so the two runs do not collide in the identity store.
    var user = new ApplicationUser
    {
        UserName = recognitionResultsArePresent ? "UserTest" : "UserTest2",
        Email = "*****@*****.**"
    };
    await app.UserManager.CreateAsync(user, "UserTest@123");
    await app.SetUser(user);

    DateTime timeStamp = new DateTime();
    var image = new byte[] { 0x20, 0x20, 0x20 };
    var sessionId = 1;
    var photoId = 2;
    var userId = user.Id;
    Points coords = new Points(new Point(0, 1), new Point(2, 3));
    bool testIsValid = true;

    RecognitionResults testRecognitionResults = recognitionResultsArePresent
        ? new RecognitionResults(testIsValid, coords, photoId)
        : null;

    Session testSession = new Session(userId, timeStamp, timeStamp, SessionStatus.ACTIVE);
    testSession.SessionID = sessionId;
    Photo testPhoto = new Photo(testSession.SessionID, image, timeStamp);
    testPhoto.PhotoID = photoId;

    // Stub repository returns the canned results (or null) for any photo id.
    var repository = new MockRepository(MockBehavior.Default);
    IRecognitionResultsRepository recognitionResultsRepository = repository.Of <IRecognitionResultsRepository>()
        .Where(rr => rr.GetRecognitionResultsByPhotoId(It.IsAny <int>()) == testRecognitionResults)
        .First();

    PhotoController photoController = CreateController(testSession, testPhoto);

    var result = (await photoController.GetRecognitionResults(
                      app.UserManager, recognitionResultsRepository) as OkObjectResult).Value;
    RecognitionResults requestRecognitionResults = getRecognitionResultsFromJsonResult(result, photoId);

    if (recognitionResultsArePresent)
    {
        Assert.Equal(testRecognitionResults, requestRecognitionResults);
    }
    else
    {
        Assert.Null(requestRecognitionResults);
    }
}
/// <summary>
/// Adds a recognition result, replacing any existing result with the same Id.
/// </summary>
/// <param name="resultText">The result to add; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="resultText"/> is null.</exception>
protected internal void AddRecognitionResult(ResultText resultText)
{
    if (resultText == null)
    {
        throw new ArgumentNullException(nameof(resultText));
    }

    // Single scan replaces the original Any(...) + SingleOrDefault(...) double enumeration.
    // SingleOrDefault is kept (rather than FirstOrDefault) so duplicate Ids still throw,
    // exactly as the original did.
    var existing = RecognitionResults.SingleOrDefault(x => x.Id == resultText.Id);
    if (existing != null)
    {
        RecognitionResults.Remove(existing);
    }

    RecognitionResults.Add(resultText);
}
//throws an exception if the status code falls outside the range 200–299
/// <summary>
/// Polls the analytics server for recognition results of the given session.
/// Returns null while the server answers 409 Conflict (results not ready yet);
/// throws HttpRequestException for any other non-OK status.
/// </summary>
/// <param name="sessionUid">Recognition session identifier sent in the query string.</param>
/// <param name="photoId">Attached to the returned domain entity; not sent to the server.</param>
public async Task <RecognitionResults> TryGetResults(string sessionUid, int photoId)
{
    // NOTE(review): creating a new HttpClient per call risks socket exhaustion under
    // load — consider a shared instance or IHttpClientFactory.
    using (var client = new HttpClient())
    {
        client.BaseAddress = analyticsServerUri;
        var response = await client.GetAsync($"rest/result/?session_uid={sessionUid}");
        // Conflict is the server's "not ready yet" signal; the caller is expected to retry.
        if (response.StatusCode.Equals(HttpStatusCode.Conflict))
        {
            return(null);
        }
        if (!response.StatusCode.Equals(HttpStatusCode.OK))
        {
            throw new HttpRequestException();
        }
        var stringResponse = await response.Content.ReadAsStringAsync();
        var responseResults = JsonConvert.DeserializeObject <RecognitionResultsResponse>(stringResponse);
        // Map the transport DTO onto the domain entity, binding it to the requested photo.
        var recognitionResults = new RecognitionResults(
            responseResults.IsValid,
            new Points(
                new Comptech.Backend.Data.DomainEntities.Point(
                    responseResults.TopLeft.X,
                    responseResults.TopLeft.Y
                    ),
                new Comptech.Backend.Data.DomainEntities.Point(
                    responseResults.BottomRight.X,
                    responseResults.BottomRight.Y
                    )
                ),
            photoId
            );
        return(recognitionResults);
    }
}
/// <summary>
/// Add a recognition result to the list of results. The list is ordered by probability
/// (highest first). Candidates at or below the minimal probability are ignored, and a
/// candidate is only inserted when it ranks among the top positions of the list.
/// </summary>
/// <param name="Probability"></param>
/// <param name="Value"></param>
public void AddRecognitionResult(double Probability, String Value)
{
    // Negated form of the original `> 0.01` check so NaN is rejected identically.
    if (!(Probability > 0.01))
    {
        return;
    }

    var candidate = new RecognitionResult();
    candidate.Content = Value;
    candidate.Probability = Probability;

    // Walk backwards from the end to find the first slot whose predecessor
    // has an equal-or-higher probability (keeps the list sorted descending).
    int insertAt = RecognitionResults.Count;
    while (insertAt > 0 && RecognitionResults[insertAt - 1].Probability < Probability)
    {
        insertAt--;
    }

    // Only the top 4 ranks accept new entries.
    if (insertAt < 4)
    {
        RecognitionResults.Insert(insertAt, candidate);
    }
}
/// <summary>
/// Kicks off a background word-forming pass over the currently recognised letters.
/// At most one pass runs at a time (guarded by an Interlocked flag); calls made while
/// a pass is in flight, or before the vocabulary is loaded, are silently dropped.
/// The outcome is published via LastScrabbleResult.
/// </summary>
internal void StartWordforming(RecognitionResults recognitionResults)
{
    if (!VocabularyLoaded)
    {
        return;
    }
    // Atomically flip the 0 -> 1 "busy" flag; a return value of 1 means another pass is already running.
    if (Interlocked.CompareExchange(ref wordformingInProgress, 1, 0) == 1)
    {
        return;
    }
    Task.Run(() =>
    {
        // 8 is a hard-coded limit passed to the matcher — TODO confirm its exact meaning.
        List <Word> bestWords = FindMostPowerfulMatchingWord(this.Vocabulary, recognitionResults.UsableLetters, 8);
        if (bestWords.Count > 0)
        {
            LastScrabbleResult = new ScrabbleResult(bestWords[0], bestWords);
        }
        else
        {
            // No match: publish an explicitly empty result rather than leaving a stale one.
            LastScrabbleResult = new ScrabbleResult(null, new List <Word>());
        }
        // Clear the busy flag so the next StartWordforming call can run.
        wordformingInProgress = 0;
    });
}
/// <summary>
/// This functions splits a character in multiple components. Used for splitting connected characters
/// </summary>
public void Split(ShapeNet ShapeNet)
{
    List <int> SplitLines;

    //Step 1: Get the position for possible splits
    SplitLines = SplitDeterminePositions();
    if (SplitLines.Count == 0)
    {
        return;
    }

    // Any previous recognition of the un-split glyph is discarded.
    RecognitionResults.Clear();

    //Step 2: Find the combination of the best (highest scores) recognised components
    List <PageComponent> Components = new List <PageComponent>(0);
    PageComponent newComponent;
    PageComponent prevComponent;
    Rectangle SplitArea;
    int start, end;

    // Bracket the candidate split positions with the glyph's left and right edges.
    SplitLines.Insert(0, 0);
    SplitLines.Add(Width - 1);
    start = 0;
    end = 1;
    while (end < SplitLines.Count)
    {
        // Candidate component between two split lines (exclusive of the lines themselves).
        SplitArea = new Rectangle(SplitLines[start] + 1, 0, SplitLines[end] - SplitLines[start] - 2, Height);
        if (SplitArea.Width > 0 && SplitArea.Height > 0)
        {
            // Trim empty rows from the top, then the bottom, of the candidate area.
            // NOTE(review): assumes at least one non-empty row exists inside the area,
            // otherwise these loops run past the bounds — TODO confirm.
            while (NumberPixelsOnRow(BinaryBytes, SplitArea, 0, 0) == 0)
            {
                SplitArea.Y = SplitArea.Y + 1;
                SplitArea.Height = SplitArea.Height - 1;
            }
            while (NumberPixelsOnRow(BinaryBytes, SplitArea, SplitArea.Height - 1, 0) == 0)
            {
                SplitArea.Height = SplitArea.Height - 1;
            }

            // Copy out the candidate, extract its features, and classify it.
            newComponent = PartialCopy(SplitArea);
            ExtractFeatures.ExecuteExtractFeatures(newComponent, false);
            RecogniseComponent.RecogniseWithoutConnectedRepair(ShapeNet, newComponent);

            if (Components.Count > 0 && end - start > 1)
            {
                // A wider candidate spanning several split lines competes with the
                // previously accepted component: keep whichever scores higher,
                // unless the wider one is itself classified as connected/garbage.
                prevComponent = Components.Last();
                if (prevComponent.ContentProbability < newComponent.ContentProbability && newComponent.Content != "connected" && newComponent.Content != "garbage")
                {
                    Components.Remove(prevComponent);
                    Components.Add(newComponent);
                }
                else
                {
                    // Reject the widening; restart scanning from the previous split line.
                    start = end - 1;
                    end--;
                }
            }
            else
            {
                Components.Add(newComponent);
            }
        }
        end++;
    }

    //Add the new recognition result
    // Content is the concatenation of the accepted components; probability is their mean.
    // NOTE(review): if no component was accepted, Components.Count is 0 and the division
    // yields NaN — TODO confirm callers tolerate that.
    RecognitionResult newResult;
    newResult = new RecognitionResult();
    newResult.Content = "";
    newResult.Probability = 0;
    foreach (PageComponent Component in Components)
    {
        newResult.Content += Component.Content;
        newResult.Probability += Component.ContentProbability;
    }
    newResult.Probability = newResult.Probability / Components.Count;
    RecognitionResults.Add(newResult);

    //Save a copy of the image to the disc
    if (DebugTrace.DebugTrace.TraceFeatures)
    {
        // Zero-pad the component id to six characters for the trace file name.
        String ComponentID = "000000" + ID;
        ComponentID = ComponentID.Substring(ComponentID.Length - 6);

        // Mark each split line in the binary image so it is visible in the saved bitmap.
        foreach (int SplitLine in SplitLines)
        {
            int pointer = SplitLine;
            for (int y = 0; y < Height; y++)
            {
                if (BinaryBytes[SplitLine, y] == 0xFF)
                {
                    BinaryBytes[SplitLine, y] = 0x10;
                }
                // NOTE(review): 'pointer' is advanced but never read — looks like a leftover.
                pointer += Stride;
            }
        }
        Bitmap Bitmap = DebugTrace.DebugTrace.CreateBitmapFromByteArray(BinaryBytes, new Size(Width, Height));
        String Filename = DebugTrace.DebugTrace.TraceFeatureFolder + "image_" + ComponentID + "_split.bmp";
        Bitmap.Save(Filename);
    }
}
/// <summary>
/// Returns the recognition results for the current user's most recent photo in their
/// last session. Responds 200 with a null recognitionResult while results are not yet
/// available, and 400 when no session exists or any exception occurs.
/// </summary>
public async Task <IActionResult> GetRecognitionResults(
    [FromServices] UserManager <ApplicationUser> userManager,
    [FromServices] IRecognitionResultsRepository recognitionResultsRepository)
{
    using (_logger.BeginScope(nameof(GetRecognitionResults)))
    {
        try
        {
            var user = await userManager.GetUserAsync(HttpContext.User);
            // Identity stores the id as a string; the repositories key on int.
            int userId = int.Parse(await userManager.GetUserIdAsync(user));
            Session session = _sessionRepository.GetLastSessionForUser(userId);
            if (session == null)
            {
                return(BadRequest("Session is not started yet."));
            }
            _logger.LogInformation("Trying to get photo for session {0}", session.SessionID);
            // NOTE(review): photo is not null-checked; a missing photo surfaces as the
            // generic 400 via the catch below — TODO confirm that is intended.
            Photo photo = _photoRepository.GetLastPhotoInSession(session.SessionID);
            _logger.LogInformation("Trying to get recognition results for photo {0}", photo.PhotoID);
            RecognitionResults recognitionResults = recognitionResultsRepository.GetRecognitionResultsByPhotoId(photo.PhotoID);
            if (recognitionResults == null)
            {
                // Results pending: a 200 with a null payload tells the client to poll again.
                _logger.LogInformation("Recognition results are not ready for photo {0} yet", photo.PhotoID);
                return(Ok(new { recognitionResult = (string)null }));
            }
            _logger.LogInformation(
                "Recognition results for photo {0} were successfully retrieved",
                photo.PhotoID);
            // Shape the domain entity into the anonymous JSON contract the client expects.
            return(Ok(new
            {
                recognitionResult = new
                {
                    valid = recognitionResults.IsValid,
                    coordinates = new
                    {
                        topLeft = new
                        {
                            x = recognitionResults.Coords.TopLeft.X,
                            y = recognitionResults.Coords.TopLeft.Y,
                        },
                        bottomRight = new
                        {
                            x = recognitionResults.Coords.BottomRight.X,
                            y = recognitionResults.Coords.BottomRight.Y,
                        }
                    }
                }
            }));
        }
        catch (Exception e)
        {
            _logger.LogError("Exception caught: {0}, {1}", e.Message, e.StackTrace);
            return(BadRequest(e.Message));
        }
    }
}
/// <summary>
/// Drives one recognition task end-to-end: acquires a recognition session from the
/// analytics service, uploads the photo (re-queueing the task when every attempt
/// fails), polls until results are ready, and stores them in the repository.
/// </summary>
/// <param name="recognitionTask">The task describing the photo and model to run.</param>
private async Task RunTaskForRecognition(RecognitionTask recognitionTask)
{
    string recognitionSessionUID = null;
    int pollingTimeout = int.Parse(configuration.GetSection("AnalyticsPollingTimeout").Value);

    // Poll until the analytics service hands out a session; failures are logged and retried.
    while (recognitionSessionUID == null)
    {
        try
        {
            recognitionSessionUID = await analyticsClient.RequestRecognitionSession(recognitionTask.ModelName);
            if (recognitionSessionUID != null)
            {
                // Break before the delay so a successful request is not penalized with a wait.
                break;
            }
        }
        catch (Exception exception)
        {
            logger.LogError("Exception caught: {0}, {1}", exception.Message, exception.StackTrace);
        }
        // await Task.Delay replaces Thread.Sleep: blocking in an async method ties up
        // a thread-pool thread for the entire polling interval.
        await Task.Delay(pollingTimeout);
    }

    Photo photo = photoRepository.GetPhotoById(recognitionTask.PhotoId);

    // Bounded number of upload attempts; give up and re-queue the task if all fail.
    bool isPhotoUploaded = false;
    int photoUploadTryCount = int.Parse(configuration.GetSection("PhotoUploadTryCount").Value);
    for (int i = 0; i < photoUploadTryCount; i++)
    {
        try
        {
            await analyticsClient.UploadPhoto(photo.Image, recognitionSessionUID);
            isPhotoUploaded = true;
            break;
        }
        catch (Exception exception)
        {
            logger.LogError("Exception caught: {0}, {1}", exception.Message, exception.StackTrace);
            await Task.Delay(pollingTimeout);
        }
    }

    if (!isPhotoUploaded)
    {
        queue.Enqueue(recognitionTask);
        return;
    }

    // Poll until the recognition results are ready.
    RecognitionResults recognitionResults = null;
    while (recognitionResults == null)
    {
        try
        {
            recognitionResults = await analyticsClient.TryGetResults(recognitionSessionUID, recognitionTask.PhotoId);
            if (recognitionResults != null)
            {
                break;
            }
        }
        catch (Exception exception)
        {
            logger.LogError("Exception caught: {0}, {1}", exception.Message, exception.StackTrace);
        }
        await Task.Delay(pollingTimeout);
    }

    recognitionRepository.Add(recognitionResults);
}
/// <summary>
/// Swaps in a new camera (or clears it when null) and resets every piece of
/// recognition state derived from the previous camera's images.
/// Guarded by the instance lock.
/// </summary>
public void SetCamera(CameraColorRGB camera)
{
    lock (_lock)
    {
        if (camera == null)
        {
            _camera = null;
            _somList = null;
        }
        else
        {
            _camera = camera;
            // The SOM is sized to the camera's square pixel grid and fed edge-detected input.
            _somList = new SOMList(new[] { camera.PixelWidthHeight, camera.PixelWidthHeight }, Convolutions.GetEdgeSet_Sobel(), discardDupes: _somDiscardDupes, isColor2D: _somIsColor);        // the edge detect really helps.  without it, there tended to just be one big somnode after a while
        }
        // All derived state belonged to the old camera's imagery — start fresh.
        _shortTermMemory.Clear();
        _nonLifeEventSnapshots.Clear();
        _importantEvents.Clear();
        _recognizers = null;
        _results = null;
    }
}
/// <summary>
/// Persists the given recognition results through the underlying repository.
/// Always reports success.
/// </summary>
/// <param name="entity">The recognition results to store.</param>
/// <returns>Always true.</returns>
public bool Add(RecognitionResults entity)
{
    rep.Add(entity);

    return true;
}
/// <summary>
/// Updating recognition results is not supported by this repository implementation.
/// </summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public bool Update(RecognitionResults entity) => throw new NotImplementedException();
/// <summary>
/// This gets called on a regular basis.  It gets the camera's current image, stores that image in various memory lists, and
/// classifies the image
/// </summary>
/// <returns>
/// True: This method has populated the output neurons
/// False: The output neurons need to be set to zero by the caller
/// </returns>
private bool Tick()
{
    // Snapshot the camera field once; SetCamera can swap/clear it from another thread.
    CameraColorRGB camera = _camera;
    if (camera == null)
    {
        return false;
    }

    //TODO: This portion may need to run in a different thread
    Tuple<long, IBitmapCustom> bitmap = camera.Bitmap;
    if (bitmap == null)
    {
        return false;
    }

    // Item1 is a token identifying the frame; skip frames already classified.
    RecognitionResults results = _results;
    if (results != null && results.Token == bitmap.Item1)
    {
        // The bitmap is old, and has already be classified
        return false;
    }

    // The camera output is ARGB colors from 0 to 255.  Convert to values from 0 to 1
    double[] nnInput;
    if (_isColor)
    {
        // Color, 3 values per pixel
        nnInput = bitmap.Item2.GetColors_Byte().
            SelectMany(o => new[] { o[1] / 255d, o[2] / 255d, o[3] / 255d }).       // o[0] is alpha
            ToArray();
    }
    else
    {
        // Grayscale, 1 value per pixel
        nnInput = bitmap.Item2.GetColors_Byte().
            Select(o => UtilityWPF.ConvertToGray(o[1], o[2], o[3]) / 255d).     // o[0] is alpha
            ToArray();
    }

    // Record the normalized frame in the various memory stores.
    _latestImage = nnInput;
    _shortTermMemory.StoreSnapshot(nnInput);
    _nonLifeEventSnapshots.Add(nnInput);

    // Feed the self-organizing map when one is attached (set alongside the camera).
    var somList = _somList;
    if (somList != null)
    {
        somList.Add(nnInput);
    }

    LifeEventToVector lifeEvents = _lifeEvents;
    if (lifeEvents == null)
    {
        // Lifeevents defines how many output neurons there are.  So if it's not set, then there's nothing
        // to train to
        return false;
    }

    // Recognize the image, and set the output neurons
    results = RecognizeImage(nnInput, bitmap.Item1, bitmap.Item2.Width, bitmap.Item2.Height, _finalResolution, _recognizers, lifeEvents, _convolution, _isColor);

    lock (_lock)
    {
        _results = results;

        for (int cntr = 0; cntr < results.Output.Length; cntr++)
        {
            _outputNeurons[cntr].Value = results.Output[cntr];
        }
    }

    return true;
}
/// <summary>
/// Sends the image at <paramref name="filePath"/> to the AI module's scoring endpoint,
/// builds an analysis result when the given tag crosses its probability threshold,
/// forwards per-tag reporting messages to the hub, and moves the image plus its JSON
/// payload into a "flagged" or "safe" output folder. Returns null on any failure.
/// </summary>
static async Task <ImageAnalysisResult> AnalyzeImage(string filePath, string factoryId, string cameraId, EnvSettings.AIModule module, EnvSettings.AIModule.Tag tag, string outputFolder)
{
    try
    {
        ImageAnalysisResult analyzeResult = null;

        // Get output directory details
        string storageAccountName = _envSettings.GetProperty("StorageAccountName");
        string dbeShareContainerName = _envSettings.GetProperty("DBEShareContainerName");
        string flaggedFolder = "flagged";
        string nonFlaggedFolder = "safe";

        // Read image
        RecognitionResults recognitionResults = null;
        byte[] byteArray = File.ReadAllBytes(filePath);
        using (ByteArrayContent content = new ByteArrayContent(byteArray))
        {
            content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");

            try
            {
                // Score the raw bytes against the module; any non-success response aborts the analysis.
                var response = await _httpClient.PostAsync(module.ScoringEndpoint, content);
                if (!response.IsSuccessStatusCode)
                {
                    _consoleLogger.LogError($"Failed to make POST request to module {module.ScoringEndpoint}. Response: {response.ReasonPhrase}");
                    return(null);
                }
                else
                {
                    _consoleLogger.LogDebug($"POST request to module {module.ScoringEndpoint} was successful");

                    var contentString = await response.Content.ReadAsStringAsync();
                    recognitionResults = JsonConvert.DeserializeObject <RecognitionResults>(contentString);
                }
            }
            catch (Exception e)
            {
                _consoleLogger.LogError($"AnalyzeImage:PostAsync failed to make POST request to module {module.ScoringEndpoint}. Exception: {e}");
                return(null);
            }
        }

        /// Need to differentiate between the current tag being flagged and
        /// any other tags from this module in order to mark the image appropriately.
        /// Logic invites to think that in case the current tag is flagged,
        /// it will also be in the all flagged tags list.
        var currentFlaggedTag = recognitionResults.Predictions.Where(x => x.TagName == tag.Name && x.Probability >= tag.Probability);
        var allFlaggedTags = recognitionResults.Predictions.Where(x => module.Tags.Where(y => x.TagName == y.Name && x.Probability >= y.Probability).Count() > 0);

        // Create analyze result object
        string fileName = Path.GetFileName(filePath);
        if (currentFlaggedTag.Count() > 0)
        {
            string imageUri = $"https://{storageAccountName}.blob.core.windows.net/{dbeShareContainerName}/{factoryId}/{cameraId}/{flaggedFolder}/{fileName}";

            _consoleLogger.LogInformation($"---> Found tags in image {filePath}: {string.Join(", ", currentFlaggedTag.Select(x => x.TagName))}");

            // Create message content
            // The file name (without extension) encodes the capture timestamp as yyyyMMddTHHmmssfff.
            string datePattern = @"^(\d{4})(\d{2})(\d{2})T(\d{2})(\d{2})(\d{2})(\d{3})$";
            var match = Regex.Match(Path.GetFileNameWithoutExtension(fileName), datePattern);
            DateTime timestamp = new DateTime(
                Convert.ToInt32(match.Groups[1].Value),
                Convert.ToInt32(match.Groups[2].Value),
                Convert.ToInt32(match.Groups[3].Value),
                Convert.ToInt32(match.Groups[4].Value),
                Convert.ToInt32(match.Groups[5].Value),
                Convert.ToInt32(match.Groups[6].Value),
                Convert.ToInt32(match.Groups[7].Value));

            analyzeResult = new ImageAnalysisResult()
            {
                ImageUri = imageUri,
                Timestamp = timestamp,
                Results = ImageAnalysisResult.Result.Results(currentFlaggedTag, module),
            };

            // Get flat results for reporting purposes
            FlatImageAnalysisResult[] flatImageResults = FlatImageAnalysisResult.Convert(factoryId, cameraId, analyzeResult);
            foreach (var flatResult in flatImageResults)
            {
                // Create hub message and set its properties
                var message = new Message(Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(flatResult)));
                message.Properties.Add("messageType", "reporting");

                // Send reporting message
                await SendMessageToHub(message);

                // Log it
                _consoleLogger.LogTrace($"Sent reporting message for camera {cameraId}");
            }
        }
        else
        {
            _consoleLogger.LogTrace($"No tags were found in image {filePath}");
        }

        // Save image to output directory
        // An image flagged by ANY of the module's tags (not just the current one) lands in the flagged folder.
        string destinationFolder = allFlaggedTags.Count() > 0 ? flaggedFolder : nonFlaggedFolder;

        // Set output directory
        string outputDirectory = Path.Combine(outputFolder, factoryId, cameraId, destinationFolder);
        if (!Directory.Exists(outputDirectory))
        {
            Directory.CreateDirectory(outputDirectory);
        }

        // Save image
        string imageOutputPath = Path.Combine(outputDirectory, fileName);
        File.WriteAllBytes(imageOutputPath, byteArray);
        _consoleLogger.LogTrace($"Moving image to final destination folder {imageOutputPath}");

        // Save payload
        string fileOutputPath = Path.Combine(outputDirectory, Path.ChangeExtension(fileName, "json"));
        File.WriteAllText(fileOutputPath, JsonConvert.SerializeObject(recognitionResults.Predictions));

        // Delete image from local folder
        File.Delete(filePath);

        return(analyzeResult);
    }
    catch (Exception e)
    {
        _consoleLogger.LogCritical("AnalyzeImage caught an exception: {0}", e);
        return(null);
    }
}