/// <summary>
/// Exports per-tag word/occurrence CSV files for every requested n-gram size,
/// zips the result into a single archive and reports the download URL.
/// </summary>
/// <param name="processId">Id of the long-running process used for progress/result reporting (also names the temp folder and zip).</param>
/// <param name="dataSetName">Name of the data set to export from.</param>
/// <param name="tagIdList">Tags whose word subsets are exported (one CSV per tag per n-gram).</param>
/// <param name="nGramList">N-gram sizes to export (one sub-folder per size).</param>
/// <param name="token">Cancellation token; on cancellation the process is marked Cancelled and the method returns.</param>
/// <param name="hostUrl">Base URL used to build the public download link of the zip.</param>
public void ExportWords(string processId, string dataSetName, List<string> tagIdList, List<int> nGramList, CancellationToken token, string hostUrl)
{
    try
    {
        var globalStoreDataSet = GlobalStore.DataSets.Get(dataSetName);
        var dataSet = globalStoreDataSet.DataSet;
        // One progress unit per (tag, n-gram) pair, plus ~10% headroom reserved for the zip step
        // so the bar does not hit 100% before compression finishes.
        var allDicCount = tagIdList.Count * nGramList.Count;
        allDicCount += allDicCount / 10;
        var counter = 0;
        foreach (var nGram in nGramList)
        {
            var subsetCreator = new SubsetCreator(
                dataSet.Name,
                new List<string> { DocumentElastic.TextField },
                dataSet.InterpretedFields.Select(DocumentQuery.MapDocumentObjectName).ToList(),
                nGram,
                queryFactory,
                globalStoreDataSet.AttachmentFields);
            var tempDirectoryPath = $"{siteConfig.Directory.Temp}/{processId}/{nGram}";
            Directory.CreateDirectory(tempDirectoryPath);
            foreach (var tagId in tagIdList)
            {
                // Cooperative cancellation: stop between tags, before any further Elasticsearch work.
                if (token.IsCancellationRequested)
                {
                    processHandler.Cancelled(processId);
                    return;
                }
                var subset = subsetCreator.CreateByTag(tagId, dataSet.TagField);
                var csvPath = $"{tempDirectoryPath}/{tagId}.csv";
                // Row layout: word, occurrences within the tag, occurrences within the whole corpus.
                CsvHelper.CreateCsv(csvPath, subset.WordsWithOccurences
                    .Select(d => new List<string> { d.Key, d.Value.Tag.ToString(), d.Value.Corpus.ToString() })
                    .ToList());
                processHandler.Changed(processId, Math.Round(++counter / (double)allDicCount * 100, 2));
            }
        }
        // Compress the whole per-process temp folder into the user directory and publish the link.
        var zipFileName = $"{processId}.zip";
        var dirToZipPath = $"{siteConfig.Directory.Temp}/{processId}";
        var resultZipPath = $"{siteConfig.Directory.User}/{processId}.zip";
        ZipHelper.CompressFolder(dirToZipPath, resultZipPath);
        var zipUrl = $"{hostUrl}{Common.Constants.FilesPath}/{zipFileName}";
        // Resource strings carry their own {0}/{1} placeholders, so string.Format stays here.
        processHandler.Finished(processId, string.Format("{0}\n{1}",
            string.Format(TagResources.SuccessfullyExportedWordsFrom_0_TagsOfDataset_1, tagIdList.Count, dataSet.Name),
            string.Format(TagResources.ExportFileCanBeDownloadFromHere_0, zipUrl)));
    }
    catch (Exception ex)
    {
        processHandler.Interrupted(processId, ex);
    }
}
/// <summary>
/// Prepares a Classifier service: for every (n-gram, tag) pair builds a Twister
/// dictionary from the tag's document subset and serializes it to disk, then marks
/// the service Prepared. On failure/cancellation the service is reset to New and
/// the working directory is removed.
/// </summary>
/// <param name="processId">Id of the long-running process used for progress/result reporting.</param>
/// <param name="settings">Classifier settings: data set, tags, n-gram sizes and compress options.</param>
/// <param name="token">Cancellation token checked per tag inside the parallel loop.</param>
public void Prepare(string processId, ClassifierSettingsElastic settings, CancellationToken token)
{
    var directoryPath = GetDirectoryPath(settings.ServiceId);
    try
    {
        var service = serviceQuery.Get(settings.ServiceId);
        service.Status = (int)ServiceStatusEnum.Busy;
        serviceQuery.Update(service.Id, service);
        // Start from a clean slate: remove any leftovers of a previous Prepare run.
        IOHelper.SafeDeleteDictionary(directoryPath, true);
        var globalStoreDataSet = GlobalStore.DataSets.Get(settings.DataSetName);
        var dataSet = globalStoreDataSet.DataSet;
        var allDicCount = settings.NGramList.Count * settings.Tags.Count;
        var counter = 0;
        var lockObject = new object();
        Directory.CreateDirectory(directoryPath);
        foreach (var nGram in settings.NGramList)
        {
            var subsetCreator = new SubsetCreator(
                dataSet.Name,
                new List<string> { DocumentElastic.TextField },
                dataSet.InterpretedFields.Select(DocumentQuery.MapDocumentObjectName).ToList(),
                nGram,
                queryFactory,
                globalStoreDataSet.AttachmentFields);
            var actualDirectory = $"{directoryPath}/{nGram}";
            Directory.CreateDirectory(actualDirectory);
            Parallel.ForEach(settings.Tags, parallelService.ParallelOptions(), (tag, loopState) =>
            {
                token.ThrowIfCancellationRequested();
                var subset = subsetCreator.CreateByTag(tag.Id, dataSet.TagField);
                var dictionary = new TwisterAlgorithm(
                    subset, true, false,
                    settings.CompressSettings.CompressCategoryOccurence,
                    settings.CompressSettings.CompressDataSetOccurence,
                    (LogicalOperatorEnum)settings.CompressSettings.CompressOperator).GetDictionary();
                var dicProtoBuf = new DictionaryProtoBuf { Id = tag.Id, Dictionary = dictionary, NGram = nGram };
                // Lock guards both the shared progress counter and the serialization,
                // matching the Prc Prepare method's behavior.
                lock (lockObject)
                {
                    dicProtoBuf.Serialize($"{actualDirectory}/{dicProtoBuf.GetFileName()}");
                    processHandler.Changed(processId, Math.Round(++counter / (double)allDicCount * 100, 2));
                }
            });
        }
        processHandler.Finished(processId, string.Format(ServiceResources.SuccessfullyPrepared_0_Service_1, ServiceTypeEnum.Classifier, service.Name));
        service.Status = (int)ServiceStatusEnum.Prepared;
        serviceQuery.Update(service.Id, service);
    }
    catch (Exception ex)
    {
        // Roll back: reset service state and delete the partially-built dictionaries.
        var service = serviceQuery.Get(settings.ServiceId);
        service.Status = (int)ServiceStatusEnum.New;
        serviceQuery.Update(service.Id, service);
        IOHelper.SafeDeleteDictionary(directoryPath, true);
        // Cancellation may surface directly as OperationCanceledException (when the token is
        // registered in ParallelOptions) or wrapped by Parallel.ForEach in an AggregateException;
        // handle both so a cancel is never misreported as an interruption.
        if (ex is OperationCanceledException || ex.InnerException is OperationCanceledException)
        {
            processHandler.Cancelled(processId);
        }
        else
        {
            processHandler.Interrupted(processId, ex);
        }
    }
}
/// <summary>
/// Prepares a Prc service: for every tag builds a 1-gram Twister dictionary,
/// prunes words with non-positive PMI from the tag's subset, and serializes both the
/// dictionary and the pruned subset to disk. Marks the service Prepared on success;
/// on failure/cancellation resets it to New and removes the working directory.
/// </summary>
/// <param name="processId">Id of the long-running process used for progress/result reporting.</param>
/// <param name="settings">Prc settings: data set, tags and compress options.</param>
/// <param name="token">Cancellation token checked per tag inside the parallel loop.</param>
public void Prepare(string processId, PrcSettingsElastic settings, CancellationToken token)
{
    var directoryPath = GetDirectoryPath(settings.ServiceId);
    try
    {
        var service = serviceQuery.Get(settings.ServiceId);
        service.Status = (int)ServiceStatusEnum.Busy;
        serviceQuery.Update(service.Id, service);
        // Start from a clean slate: remove any leftovers of a previous Prepare run.
        IOHelper.SafeDeleteDictionary(directoryPath, true);
        var globalStoreDataSet = GlobalStore.DataSets.Get(settings.DataSetName);
        var dataSet = globalStoreDataSet.DataSet;
        var progress = new Progress(settings.Tags.Count);
        // Prc always works on unigrams (nGram = 1).
        var subsetCreator = new SubsetCreator(
            dataSet.Name,
            new List<string> { DocumentElastic.TextField },
            dataSet.InterpretedFields.Select(DocumentQuery.MapDocumentObjectName).ToList(),
            1,
            queryFactory,
            globalStoreDataSet.AttachmentFields);
        Directory.CreateDirectory(directoryPath);
        var logPrefix = $"Prc Prepare {processId}";
        // Structured logging (message templates) instead of interpolated strings (CA2254).
        logger.LogInformation("{LogPrefix} starts with ParallelLimit: {ParallelLimit}, Tags Count: {TagsCount}",
            logPrefix, parallelService.ParallelLimit, settings.Tags.Count);
        var lockObject = new object();
        Parallel.ForEach(settings.Tags, parallelService.ParallelOptions(), (tag, loopState) =>
        {
            token.ThrowIfCancellationRequested();
            logger.LogInformation("{LogPrefix} preparing Tag: `{Tag}`", logPrefix, tag);
            var subset = subsetCreator.CreateByTag(tag.Id, dataSet.TagField);
            var algorithm = new TwisterAlgorithm(
                subset, true, true,
                settings.CompressSettings.CompressCategoryOccurence,
                settings.CompressSettings.CompressDataSetOccurence,
                (LogicalOperatorEnum)settings.CompressSettings.CompressOperator);
            algorithm.InitTagDictionary();
            // Words whose PMI is not positive carry no signal for this tag; drop them
            // from the subset before it is serialized.
            var notNeededWords = subset.WordsWithOccurences.Keys.Except(
                algorithm.TagDictionary
                    .Where(sd => sd.Value.PMI > 0)
                    .Select(sd => sd.Key)).ToList();
            var td = algorithm.GetDictionary();
            foreach (var word in notNeededWords)
            {
                subset.WordsWithOccurences.Remove(word);
            }
            // Lock guards the file writes and the shared progress state.
            lock (lockObject)
            {
                // Dictionary serialization.
                var dicProtoBuf = new DictionaryProtoBuf { Id = tag.Id, Dictionary = td, NGram = 1 };
                dicProtoBuf.Serialize($"{directoryPath}/{dicProtoBuf.GetFileName()}");
                // Subset serialization (pruned word list plus occurrence totals).
                var subsetProtoBuf = new SubsetProtoBuf
                {
                    Id = tag.Id,
                    WordsWithOccurences = subset.WordsWithOccurences,
                    AllWordsOccurencesSumInTag = subset.AllWordsOccurencesSumInTag,
                    AllOccurencesSumInCorpus = subset.AllWordsOccurencesSumInCorpus
                };
                subsetProtoBuf.Serialize($"{directoryPath}/{subsetProtoBuf.GetFileName()}");
                progress.Step();
                processHandler.Changed(processId, progress.Percent.Round(2));
            }
            logger.LogInformation("{LogPrefix} prepared Tag: `{Tag}`", logPrefix, tag);
        });
        processHandler.Finished(processId, string.Format(ServiceResources.SuccessfullyPrepared_0_Service_1, ServiceTypeEnum.Prc, service.Name));
        service.Status = (int)ServiceStatusEnum.Prepared;
        serviceQuery.Update(service.Id, service);
    }
    catch (Exception ex)
    {
        // Roll back: reset service state and delete the partially-built artifacts.
        var service = serviceQuery.Get(settings.ServiceId);
        service.Status = (int)ServiceStatusEnum.New;
        serviceQuery.Update(service.Id, service);
        IOHelper.SafeDeleteDictionary(directoryPath, true);
        // Cancellation may surface directly as OperationCanceledException (when the token is
        // registered in ParallelOptions) or wrapped by Parallel.ForEach in an AggregateException;
        // handle both so a cancel is never misreported as an interruption.
        if (ex is OperationCanceledException || ex.InnerException is OperationCanceledException)
        {
            processHandler.Cancelled(processId);
        }
        else
        {
            processHandler.Interrupted(processId, ex);
        }
    }
}