/// <summary>
/// Activates a classifier service: loads its serialized n-gram dictionaries into memory,
/// builds the scorers/tag lookups, and registers the result in <c>GlobalStore.ActivatedClassifiers</c>.
/// Progress is reported through <c>processHandler</c>; on any failure the service is rolled
/// back to <c>Prepared</c> status and removed from the global store.
/// </summary>
/// <param name="processId">Identifier of the long-running process used for progress reporting.</param>
/// <param name="settings">Classifier settings (service id, activated n-grams/tags, dataset name, etc.).</param>
/// <param name="token">Cancellation token checked inside the parallel deserialization loop.</param>
public void Activate(string processId, ClassifierSettingsElastic settings, CancellationToken token)
{
    try
    {
        // NOTE(review): explicit GC.Collect() is normally an anti-pattern; presumably kept here to
        // get an accurate free-memory reading before the size check below — confirm before removing.
        GC.Collect();
        machineResourceService.UpdateResourcesManually();
        // FreeMemory is reported in MB; convert to bytes for comparison with file sizes.
        var freeMemInBytes = machineResourceService.Status.FreeMemory * 1024 * 1024;

        // Collect the dictionary files for every activated n-gram, restricted to the activated tag ids.
        var dictionaryPaths = new List<string>();
        foreach (var nGram in settings.ActivatedNGramList)
        {
            var directoryPath = string.Format("{0}/{1}/{2}", _dictionaryRootPath, settings.ServiceId, nGram);
            var fileList = IOHelper.GetFilesInFolder(directoryPath, DictionaryProtoBuf.GetExtension())
                .Where(file => settings.ActivatedTagIdList.Contains(Path.GetFileNameWithoutExtension(file)));
            dictionaryPaths.AddRange(fileList);
        }

        // Refuse to activate if the in-memory footprint (on-disk size * multiplier) would not fit.
        var sizeInBytes = dictionaryPaths.Sum(f => new FileInfo(f).Length);
        if (freeMemInBytes > 0 && freeMemInBytes < sizeInBytes * Constants.DictionaryInMemoryMultiplier)
        {
            throw new Common.Exceptions.OutOfResourceException(ServiceResources.NotEnoughResourceToActivateService);
        }

        // Mark the service Busy while activation is in progress.
        var service = serviceQuery.Get(settings.ServiceId);
        service.Status = (int)ServiceStatusEnum.Busy;
        serviceQuery.Update(service.Id, service);

        // Deserialize all dictionaries in parallel, reporting progress every 15 files.
        var lockObject = new object();
        var counter = 0;
        var allCount = dictionaryPaths.Count;
        var deserializedDics = new ConcurrentBag<DictionaryProtoBuf>();
        Parallel.ForEach(dictionaryPaths, parallelService.ParallelOptions(), (path, loopState) =>
        {
            token.ThrowIfCancellationRequested();
            deserializedDics.Add(BaseProtoBuf.DeSerialize<DictionaryProtoBuf>(path));
            lock (lockObject)
            {
                if (++counter % 15 == 0)
                {
                    processHandler.Changed(processId, Math.Round(counter / (double)allCount * 100, 2));
                }
            }
        });

        var globalStoreClassifier = new GlobalStoreClassifier();
        if (deserializedDics.Any())
        {
            // One scorer per dictionary id, built from that id's n-gram -> dictionary map.
            var scorersDic = deserializedDics
                .GroupBy(d => d.Id)
                .ToDictionary(
                    d => d.Key,
                    d => new Cerebellum.Scorer.PeSScorer(d.ToDictionary(di => di.NGram, di => di.Dictionary)));
            globalStoreClassifier.ClassifierScorers = scorersDic;
        }

        // Build tag lookups: raw tags first, then full tag models resolved via the tag service.
        var tagsDics = settings.Tags.ToDictionary(t => t.Id, t => t);
        var tagsDic = settings.Tags.ToDictionary(
            t => t.Id,
            t => tagService.GetTagModel(settings.DataSetName, t.Id, false, tagsDics));

        // Pre-analyze the names of emphasized tags into token lists.
        var analyzeQuery = queryFactory.GetAnalyzeQuery(settings.DataSetName);
        var emphasizedTagsWords = new Dictionary<string, List<string>>();
        foreach (var tagId in settings.EmphasizedTagIdList)
        {
            var tokens = analyzeQuery.Analyze(tagsDic[tagId].Name, 1).ToList();
            emphasizedTagsWords.Add(tagId, tokens);
        }

        globalStoreClassifier.ClassifierEmphasizedTagIds = emphasizedTagsWords;
        globalStoreClassifier.ClassifiersSettings = settings;
        globalStoreClassifier.ClassifierTags = tagsDic;
        // Distinct set of all ancestor tag ids, stored as an id -> id map for O(1) membership checks.
        globalStoreClassifier.ClassifierParentTagIds = tagsDic
            .SelectMany(td => td.Value.Properties.Paths.Select(p => p.Id))
            .Distinct()
            .ToDictionary(p => p, p => p);

        GlobalStore.ActivatedClassifiers.Add(settings.ServiceId, globalStoreClassifier);

        processHandler.Finished(processId, string.Format(ServiceResources.SuccessfullyActivated_0_Service_1, ServiceTypeEnum.Classifier, service.Name));
        service.Status = (int)ServiceStatusEnum.Active;
        serviceQuery.Update(service.Id, service);
    }
    catch (Exception ex)
    {
        // Roll back: service returns to Prepared and any partial registration is removed.
        var service = serviceQuery.Get(settings.ServiceId);
        service.Status = (int)ServiceStatusEnum.Prepared;
        serviceQuery.Update(service.Id, service);
        if (GlobalStore.ActivatedClassifiers.IsExist(settings.ServiceId))
        {
            GlobalStore.ActivatedClassifiers.Remove(settings.ServiceId);
        }

        // Fix: Parallel.ForEach can surface OperationCanceledException either directly
        // (when the token in ParallelOptions is cancelled) or wrapped in an AggregateException
        // (when thrown from the loop body). The original only inspected InnerException, so a
        // direct cancellation was misreported as an interruption. The `is` pattern also makes
        // the previous explicit null check redundant.
        if (ex is OperationCanceledException || ex.InnerException is OperationCanceledException)
        {
            processHandler.Cancelled(processId);
        }
        else
        {
            processHandler.Interrupted(processId, ex);
        }
        GC.Collect();
    }
}
/// <summary>
/// Registers an activated classifier in the store under the given service id.
/// Throws <see cref="System.ArgumentException"/> if the id is already present
/// (standard <c>Dictionary.Add</c> semantics).
/// </summary>
/// <param name="id">Service identifier used as the dictionary key.</param>
/// <param name="classifier">The activated classifier state to store.</param>
public void Add(string id, GlobalStoreClassifier classifier) => ClassifierDictionary.Add(id, classifier);