Example no. 1
        /// <summary>
        /// Runs the ML model over every photo in <c>_photos</c>, storing detected objects on
        /// each photo view model and reporting progress through <c>PredictProgress</c>,
        /// <c>PredictTextProgress</c> and the application status manager.
        /// A failure on a single photo is logged and skipped; a failure outside the loop
        /// (config load, model init) aborts the whole run. The status is always reset to
        /// <c>Ready</c> at the end.
        /// </summary>
        public async Task PredictAll()
        {
            _applicationStatusManager.ChangeCurrentAppStatus(Enums.Status.Working, "");
            try
            {
                Status = "starting ml model...";
                // Load the app config from the per-user local application data folder.
                var confDir    = Path.Join(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), "lacmus");
                var configPath = Path.Join(confDir, "appConfig.json");
                _appConfig = await AppConfig.Create(configPath);

                var config = _appConfig.MlModelConfig;
                using (var model = new MLModel(config))
                {
                    await model.Init();

                    var count       = 0;
                    var objectCount = 0;
                    // Hoisted: Count() is loop-invariant here; the original re-enumerated
                    // _photos.Items up to four times per processed photo.
                    var totalCount  = _photos.Items.Count();
                    Status = "processing...";
                    foreach (var photoViewModel in _photos.Items)
                    {
                        try
                        {
                            photoViewModel.Annotation.Objects = await model.Predict(photoViewModel);

                            photoViewModel.BoundBoxes = photoViewModel.GetBoundingBoxes();
                            if (photoViewModel.BoundBoxes.Any())
                            {
                                photoViewModel.Photo.Attribute = Attribute.WithObject;
                                photoViewModel.IsHasObject     = true;
                            }
                            objectCount += photoViewModel.BoundBoxes.Count();
                            count++;
                            // Compute the percentage once and reuse it. Rounding semantics of
                            // each consumer are preserved: Convert.ToInt32 rounds for the text
                            // label, the (int) cast truncates for the status bar (as before).
                            PredictProgress     = (double)count / totalCount * 100;
                            PredictTextProgress = $"{Convert.ToInt32(PredictProgress)} %";
                            _applicationStatusManager.ChangeCurrentAppStatus(Enums.Status.Working, $"Working | {(int)PredictProgress} %, [{count} of {totalCount}]");
                            Console.WriteLine($"\tProgress: {PredictProgress} %");
                        }
                        catch (Exception e)
                        {
                            // Best-effort per photo: log and continue with the next one.
                            // (Fixed message typo: "Slipped" -> "Skipped".)
                            Log.Error(e, $"Unable to process file {photoViewModel.Path}. Skipped.");
                        }
                    }
                    Status = "stopping ml model...";
                    await model.Stop();

                    PredictTextProgress = $"predict {_photos.Count} photos.";
                    Log.Information($"Successfully predict {_photos.Count} photos. Find {objectCount} objects.");
                }
            }
            catch (Exception e)
            {
                Status = "error.";
                Log.Error(e, "Unable to get prediction.");
            }
            // Always restore the Ready status, even after a fatal error above.
            _applicationStatusManager.ChangeCurrentAppStatus(Enums.Status.Ready, "");
        }