public void MergeWordsOneChildTest()
{
    // Create a word and persist it so it has a repository-assigned Id.
    var word = Util.RandomWord(ProjId);
    word = _wordRepo.Create(word).Result;

    // Merge the word with itself as the only child.
    var mergeObject = new MergeWords
    {
        Parent = word,
        Children = new List<MergeSourceWord>
        {
            new MergeSourceWord { SrcWordId = word.Id }
        }
    };
    var newWords = _mergeService.Merge(ProjId, new List<MergeWords> { mergeObject }).Result;

    // Exactly one word should be produced, with content identical to the input.
    Assert.That(newWords, Has.Count.EqualTo(1));
    var mergedWord = newWords.First();
    Assert.That(mergedWord.ContentEquals(word));

    // The frontier should now contain only the merged word.
    var frontier = _wordRepo.GetFrontier(ProjId).Result;
    Assert.That(frontier, Has.Count.EqualTo(1));
    Assert.That(frontier.First(), Is.EqualTo(mergedWord));

    // The merged word's history should reference only the original word.
    Assert.That(mergedWord.History, Has.Count.EqualTo(1));
    Assert.That(mergedWord.History.First(), Is.EqualTo(word.Id));
}
/// <summary>
/// Checks if a word being added is an exact duplicate of a preexisting word.
/// If every sense of <paramref name="word"/> is a subset of some frontier word's senses
/// (matched by vernacular), that frontier word is updated (EditedBy and SemanticDomains
/// merged in) and the word is reported as NOT unique.
/// </summary>
/// <param name="word"> The candidate word to check against the project frontier. </param>
/// <returns> True if the word is unique (no frontier word absorbs all its senses). </returns>
public async Task<bool> WordIsUnique(Word word)
{
    // Get all words from frontier
    var allWords = await _wordRepo.GetFrontier(word.ProjectId);

    // Find all words with matching vernacular
    var allVernaculars = allWords.FindAll(x => x.Vernacular == word.Vernacular);

    var isUniqueWord = true;

    // Iterate over words with the same vernacular
    foreach (var matchingVern in allVernaculars)
    {
        // BUG FIX: this flag was previously declared outside the loop, so when
        // word.Senses was empty the value from the previous candidate word carried
        // over and could trigger a spurious Update. Reset it per candidate word.
        var foundDuplicateSense = false;

        // Iterate over senses of the new word
        foreach (var newSense in word.Senses)
        {
            foundDuplicateSense = false;

            // Iterate over the senses of the existing (frontier) word
            foreach (var oldSense in matchingVern.Senses)
            {
                // If the new sense is a strict subset of the old one, then merge it in
                if (newSense.Glosses.All(oldSense.Glosses.Contains))
                {
                    foundDuplicateSense = true;

                    // Add edited by and remove duplicates
                    matchingVern.EditedBy.AddRange(word.EditedBy);
                    matchingVern.EditedBy = matchingVern.EditedBy.Distinct().ToList();

                    // Add semantic domains and remove duplicates
                    oldSense.SemanticDomains.AddRange(newSense.SemanticDomains);
                    oldSense.SemanticDomains = oldSense.SemanticDomains.Distinct().ToList();
                }
            }

            // If we never found a matching sense in the old word, the words are different
            if (!foundDuplicateSense)
            {
                break;
            }
        }

        // Update the word only if all the senses were duplicates
        if (foundDuplicateSense)
        {
            isUniqueWord = false;
            await Update(matchingVern.ProjectId, matchingVern.Id, matchingVern);
        }
    }
    return isUniqueWord;
}
public void AddWord()
{
    // A freshly posted word should appear in both the word collection and the frontier.
    var word = RandomWord();
    var id = (_wordController.Post(_projId, word).Result as ObjectResult).Value as string;
    word.Id = id;
    Assert.AreEqual(word, _repo.GetAllWords(_projId).Result[0]);
    Assert.AreEqual(word, _repo.GetFrontier(_projId).Result[0]);

    // Posting an exact clone of an existing word should be flagged as a duplicate.
    var existingWord = RandomWord();
    var duplicate = existingWord.Clone();
    _ = _wordController.Post(_projId, existingWord).Result;
    var response = (_wordController.Post(_projId, duplicate).Result as ObjectResult).Value as string;
    Assert.AreEqual(response, "Duplicate");

    // A word whose senses are a subset of an existing word's senses is still a duplicate.
    duplicate.Senses.RemoveAt(2);
    response = (_wordController.Post(_projId, duplicate).Result as ObjectResult).Value as string;
    Assert.AreEqual(response, "Duplicate");

    // A word with no senses at all is not treated as a duplicate.
    duplicate.Senses = new List<Sense>();
    response = (_wordController.Post(_projId, duplicate).Result as ObjectResult).Value as string;
    Assert.AreNotEqual(response, "Duplicate");
}
public async Task<IActionResult> GetProjectFrontierWords(string projectId)
{
    // The requester must have word-entry permission for this project.
    var hasPermission = await _permissionService.HasProjectPermission(HttpContext, Permission.WordEntry);
    if (!hasPermission)
    {
        return Forbid();
    }

    // The project itself must exist.
    if (await _projRepo.GetProject(projectId) is null)
    {
        return NotFound(projectId);
    }

    // Return all words currently in the project's frontier.
    return Ok(await _wordRepo.GetFrontier(projectId));
}
/// <summary> Returns the frontier words of the specified project. </summary>
/// <param name="projectId"> Id of the project whose frontier is requested. </param>
/// <returns> 403 without word-entry permission, 404 if the project is missing,
/// otherwise 200 with the frontier word list. </returns>
public async Task<IActionResult> GetFrontier(string projectId)
{
    if (!_permissionService.HasProjectPermission(Permission.WordEntry, HttpContext))
    {
        return new ForbidResult();
    }

    // Ensure project exists.
    // BUG FIX: GetProject was not awaited, so the null check compared the Task object
    // (never null) rather than the project, and NotFound could never be returned.
    var project = await _projectService.GetProject(projectId);
    if (project == null)
    {
        return new NotFoundObjectResult(projectId);
    }

    return new ObjectResult(await _repo.GetFrontier(projectId));
}
public void TestDeleteAllWords()
{
    // Seed two words in the target project and one word in a different project.
    // The created words are not referenced again, so discard the return values.
    _ = _wordRepo.Create(Util.RandomWord(_projId)).Result;
    _ = _wordRepo.Create(Util.RandomWord(_projId)).Result;
    const string diffProjId = "OTHER_PROJECT";
    _ = _wordRepo.Create(Util.RandomWord(diffProjId)).Result;

    _ = _wordController.DeleteProjectWords(_projId).Result;

    // All words and the frontier of the target project should be empty...
    Assert.That(_wordRepo.GetAllWords(_projId).Result, Has.Count.Zero);
    Assert.That(_wordRepo.GetFrontier(_projId).Result, Has.Count.Zero);
    // ...while the other project's word is untouched.
    Assert.That(_wordRepo.GetAllWords(diffProjId).Result, Has.Count.EqualTo(1));
    Assert.That(_wordRepo.GetFrontier(diffProjId).Result, Has.Count.EqualTo(1));
}
/// <summary> Exports information from a project to a lift package zip </summary>
/// <param name="projectId"> Id of the project to export. </param>
/// <returns> Path to the compressed zip file containing the export. </returns>
public string LiftExport(string projectId)
{
    // The helper tag must be included because there are also SIL.Utilities.
    var util = new Utilities();

    // Generate the zip dir.
    var exportDir = util.GenerateFilePath(
        Utilities.FileType.Dir, true, "", Path.Combine(projectId, "Export"));
    if (Directory.Exists(Path.Combine(exportDir, "LiftExport")))
    {
        Directory.Delete(Path.Combine(exportDir, "LiftExport"), true);
    }
    var zipDir = Path.Combine(exportDir, "LiftExport", "Lift");
    Directory.CreateDirectory(zipDir);

    // Add audio dir inside zip dir.
    var audioDir = Path.Combine(zipDir, "audio");
    Directory.CreateDirectory(audioDir);
    var liftPath = Path.Combine(zipDir, "NewLiftFile.lift");

    // noBOM will work with PrinceXML.
    // 'using' ensures the writer (and its underlying file stream) is released even on error.
    using var liftWriter = new CombineLiftWriter(liftPath, ByteOrderStyle.BOM);
    var rangesDest = Path.Combine(zipDir, "NewLiftFile.lift-ranges");

    // write header of lift document
    var header = $@" <ranges> <range id = ""semantic-domain-ddp4"" href = ""{rangesDest}""/> </ranges> <fields> <field tag = ""Plural""> <form lang = ""en""><text></text></form> <form lang = ""qaa-x-spec""><text> Class = LexEntry; Type = String; WsSelector = kwsVern </text></form> </field> </fields> ";
    liftWriter.WriteHeader(header);

    // Write out every word with all of its information
    var allWords = _repo.GetAllWords(projectId).Result;
    var frontier = _repo.GetFrontier(projectId).Result;
    var activeWords = frontier.Where(
        x => x.Senses.First().Accessibility == (int)State.Active).ToList();

    // BUG FIX: deleted words are the ones NOT active in the frontier.
    // The previous code selected the active words themselves (see old TODO).
    var deletedWords = allWords.Where(x => !activeWords.Contains(x)).ToList();
    foreach (var wordEntry in activeWords)
    {
        var entry = new LexEntry();
        AddVern(entry, wordEntry, projectId);
        AddSenses(entry, wordEntry);
        AddAudio(entry, wordEntry, audioDir);
        liftWriter.Add(entry);
    }
    foreach (var wordEntry in deletedWords)
    {
        var entry = new LexEntry();
        AddVern(entry, wordEntry, projectId);
        AddSenses(entry, wordEntry);
        AddAudio(entry, wordEntry, audioDir);
        liftWriter.AddDeletedEntry(entry);
    }
    liftWriter.End();

    // Export semantic domains to lift-ranges
    var proj = _projService.GetProject(projectId).Result;
    var extractedPathToImport = Path.Combine(GetProjectDir(projectId), "Import", "ExtractedLocation");
    var importLiftDir = "";
    if (Directory.Exists(extractedPathToImport))
    {
        importLiftDir = Directory.GetDirectories(extractedPathToImport)
            .Select(Path.GetFileName).ToList().Single();
    }
    var rangesSrc = Path.Combine(extractedPathToImport, importLiftDir, $"{importLiftDir}.lift-ranges");

    // If there are no new semantic domains, and the old lift-ranges file is still around, just copy it
    if (proj.SemanticDomains.Count == 0 && File.Exists(rangesSrc))
    {
        File.Copy(rangesSrc, rangesDest, true);
    }
    else // Make a new lift-ranges file
    {
        // 'using' disposes the XmlWriter (previously leaked if an exception occurred).
        using var liftRangesWriter = XmlWriter.Create(rangesDest, new XmlWriterSettings
        {
            Indent = true,
            NewLineOnAttributes = true
        });
        liftRangesWriter.WriteStartDocument();
        liftRangesWriter.WriteStartElement("lift-ranges");
        liftRangesWriter.WriteStartElement("range");
        liftRangesWriter.WriteAttributeString("id", "semantic-domain-ddp4");

        // Pull from resources file with all English semantic domains
        var assembly = typeof(LiftService).GetTypeInfo().Assembly;
        var resource = assembly.GetManifestResourceStream("BackendFramework.Data.sdList.txt");
        string sdList;
        using (var reader = new StreamReader(resource, Encoding.UTF8))
        {
            sdList = reader.ReadToEndAsync().Result;
        }
        var sdLines = sdList.Split(Environment.NewLine);
        foreach (var line in sdLines)
        {
            if (line != "")
            {
                // Each line is backtick-delimited: id`guid`name`description.
                var items = line.Split("`");
                WriteRangeElement(liftRangesWriter, items[0], items[1], items[2], items[3]);
            }
        }

        // Pull from new semantic domains in project
        foreach (var sd in proj.SemanticDomains)
        {
            WriteRangeElement(liftRangesWriter, sd.Id, Guid.NewGuid().ToString(), sd.Name, sd.Description);
        }

        liftRangesWriter.WriteEndElement(); //end semantic-domain-ddp4 range
        liftRangesWriter.WriteEndElement(); //end lift-ranges
        liftRangesWriter.WriteEndDocument();
        liftRangesWriter.Flush();
        liftRangesWriter.Close();
    }

    // Export character set to ldml
    var ldmlDir = Path.Combine(zipDir, "WritingSystems");
    Directory.CreateDirectory(ldmlDir);
    if (proj.VernacularWritingSystem != "")
    {
        LdmlExport(ldmlDir, proj.VernacularWritingSystem);
    }

    // Compress everything
    var destinationFileName = Path.Combine(exportDir,
        $"LiftExportCompressed-{proj.Id}_{DateTime.Now:yyyy-MM-dd_hh-mm-ss}.zip");
    ZipFile.CreateFromDirectory(Path.GetDirectoryName(zipDir), destinationFileName);
    return destinationFileName;
}
/// <summary> Exports information from a project to a lift package zip </summary>
/// <exception cref="MissingProjectException"> If Project does not exist. </exception>
/// <returns> Path to compressed zip file containing export. </returns>
public async Task<string> LiftExport(
    string projectId, IWordRepository wordRepo, IProjectRepository projRepo)
{
    // Validate project exists.
    var proj = await projRepo.GetProject(projectId);
    if (proj is null)
    {
        throw new MissingProjectException($"Project does not exist: {projectId}");
    }
    var vernacularBcp47 = proj.VernacularWritingSystem.Bcp47;

    // Generate the zip dir, clearing out any leftovers from a previous export.
    var exportDir = FileStorage.GenerateLiftExportDirPath(projectId);
    var liftExportDir = Path.Combine(exportDir, "LiftExport");
    if (Directory.Exists(liftExportDir))
    {
        Directory.Delete(liftExportDir, true);
    }
    // Sanitize the project name so it is safe to use as a directory/file name.
    var projNameAsPath = Sanitization.MakeFriendlyForPath(proj.Name, "Lift");
    var zipDir = Path.Combine(liftExportDir, projNameAsPath);
    Directory.CreateDirectory(zipDir);

    // Add audio dir inside zip dir.
    var audioDir = Path.Combine(zipDir, "audio");
    Directory.CreateDirectory(audioDir);
    var liftPath = Path.Combine(zipDir, projNameAsPath + ".lift");

    // noBOM will work with PrinceXML
    using var liftWriter = new CombineLiftWriter(liftPath, ByteOrderStyle.BOM);
    var rangesDest = Path.Combine(zipDir, projNameAsPath + ".lift-ranges");

    // Write header of lift document.
    var header = $@" <ranges> <range id = ""semantic-domain-ddp4"" href = ""{rangesDest}""/> </ranges> <fields> <field tag = ""Plural""> <form lang = ""en""><text></text></form> <form lang = ""qaa-x-spec""><text> Class = LexEntry; Type = String; WsSelector = kwsVern </text></form> </field> </fields> ";
    liftWriter.WriteHeader(header);

    // Write out every word with all of its information
    var allWords = await wordRepo.GetAllWords(projectId);
    var frontier = await wordRepo.GetFrontier(projectId);
    var activeWords = frontier.Where(
        x => x.Senses.Any(s => s.Accessibility == State.Active)).ToList();

    // All words in the frontier with any senses are considered current.
    // The Combine does not import senseless entries and the interface is supposed to prevent creating them.
    // So the words found in allWords with no matching guid in activeWords are exported as 'deleted'.
    var deletedWords = allWords.Where(
        x => activeWords.All(w => w.Guid != x.Guid)).DistinctBy(w => w.Guid).ToList();
    foreach (var wordEntry in activeWords)
    {
        var entry = new LexEntry(MakeSafeXmlAttribute(wordEntry.Vernacular), wordEntry.Guid);
        // Preserve creation/modification timestamps when they parse cleanly;
        // unparseable values are silently skipped.
        if (DateTime.TryParse(wordEntry.Created, out var createdTime))
        {
            entry.CreationTime = createdTime;
        }
        if (DateTime.TryParse(wordEntry.Modified, out var modifiedTime))
        {
            entry.ModificationTime = modifiedTime;
        }
        AddNote(entry, wordEntry);
        AddVern(entry, wordEntry, vernacularBcp47);
        AddSenses(entry, wordEntry);
        AddAudio(entry, wordEntry, audioDir, projectId);
        liftWriter.Add(entry);
    }
    foreach (var wordEntry in deletedWords)
    {
        var entry = new LexEntry(MakeSafeXmlAttribute(wordEntry.Vernacular), wordEntry.Guid);
        AddNote(entry, wordEntry);
        AddVern(entry, wordEntry, vernacularBcp47);
        AddSenses(entry, wordEntry);
        AddAudio(entry, wordEntry, audioDir, projectId);
        liftWriter.AddDeletedEntry(entry);
    }
    liftWriter.End();

    // Export semantic domains to lift-ranges
    var extractedPathToImport = FileStorage.GenerateImportExtractedLocationDirPath(projectId, false);
    string? firstImportDir = null;
    if (Directory.Exists(extractedPathToImport))
    {
        // TODO: Should an error be raised if this returns null?
        firstImportDir = Directory.GetDirectories(extractedPathToImport).Select(
            Path.GetFileName).ToList().Single();
    }
    var importLiftDir = firstImportDir ?? "";
    var rangesSrc = Path.Combine(extractedPathToImport, importLiftDir, $"{importLiftDir}.lift-ranges");

    // If there are no new semantic domains, and the old lift-ranges file is still around, just copy it
    if (proj.SemanticDomains.Count == 0 && File.Exists(rangesSrc))
    {
        File.Copy(rangesSrc, rangesDest, true);
    }
    else // Make a new lift-ranges file
    {
        using var liftRangesWriter = XmlWriter.Create(rangesDest, new XmlWriterSettings
        {
            Indent = true,
            NewLineOnAttributes = true,
            Async = true
        });
        await liftRangesWriter.WriteStartDocumentAsync();
        liftRangesWriter.WriteStartElement("lift-ranges");
        liftRangesWriter.WriteStartElement("range");
        liftRangesWriter.WriteAttributeString("id", "semantic-domain-ddp4");

        // Pull from resources file with all English semantic domains
        var assembly = typeof(LiftService).GetTypeInfo().Assembly;
        const string semDomListFile = "BackendFramework.Data.sdList.txt";
        var resource = assembly.GetManifestResourceStream(semDomListFile);
        if (resource is null)
        {
            throw new Exception($"Unable to load semantic domain list: {semDomListFile}");
        }
        string sdList;
        using (var reader = new StreamReader(resource, Encoding.UTF8))
        {
            sdList = await reader.ReadToEndAsync();
        }
        // Each non-empty line is backtick-delimited; the first four fields are
        // passed through to WriteRangeElement.
        var sdLines = sdList.Split(Environment.NewLine);
        foreach (var line in sdLines)
        {
            if (line != "")
            {
                var items = line.Split("`");
                WriteRangeElement(liftRangesWriter, items[0], items[1], items[2], items[3]);
            }
        }

        // Pull from new semantic domains in project
        foreach (var sd in proj.SemanticDomains)
        {
            WriteRangeElement(liftRangesWriter, sd.Id, Guid.NewGuid().ToString(), sd.Name, sd.Description);
        }

        await liftRangesWriter.WriteEndElementAsync(); //end semantic-domain-ddp4 range
        await liftRangesWriter.WriteEndElementAsync(); //end lift-ranges
        await liftRangesWriter.WriteEndDocumentAsync();
        await liftRangesWriter.FlushAsync();
        liftRangesWriter.Close();
    }

    // Export character set to ldml.
    var ldmlDir = Path.Combine(zipDir, "WritingSystems");
    Directory.CreateDirectory(ldmlDir);
    if (vernacularBcp47 != "")
    {
        var validChars = proj.ValidCharacters;
        LdmlExport(ldmlDir, vernacularBcp47, validChars);
    }

    // Compress everything.
    // NOTE(review): 'hh' is a 12-hour clock, so exports 12 hours apart can share a
    // timestamp — confirm whether 'HH' was intended.
    var destinationFileName = Path.Combine(exportDir,
        Path.Combine($"LiftExportCompressed-{proj.Id}_{DateTime.Now:yyyy-MM-dd_hh-mm-ss}.zip"));
    var zipParentDir = Path.GetDirectoryName(zipDir);
    if (zipParentDir is null)
    {
        throw new Exception($"Unable to find parent dir of: {zipDir}");
    }
    ZipFile.CreateFromDirectory(zipParentDir, destinationFileName);

    // Clean up the temporary folder structure that was compressed.
    Directory.Delete(liftExportDir, true);
    return (destinationFileName);
}