/// <summary> /// non-undoable task /// </summary> private void DoSetupFixture() { var textFactory = Cache.ServiceLocator.GetInstance <ITextFactory>(); var stTextFactory = Cache.ServiceLocator.GetInstance <IStTextFactory>(); m_text1 = textFactory.Create(); //Cache.LangProject.TextsOC.Add(m_text1); var stText1 = stTextFactory.Create(); m_text1.ContentsOA = stText1; var para1 = stText1.AddNewTextPara(null); (m_text1.ContentsOA[0]).Contents = TsStringUtils.MakeTss("xxxa xxxb xxxc xxxd xxxe, xxxa xxxb.", Cache.DefaultVernWs); InterlinMaster.LoadParagraphAnnotationsAndGenerateEntryGuessesIfNeeded(stText1, false); // setup language project parts of speech var partOfSpeechFactory = Cache.ServiceLocator.GetInstance <IPartOfSpeechFactory>(); var adjunct = partOfSpeechFactory.Create(); var noun = partOfSpeechFactory.Create(); var verb = partOfSpeechFactory.Create(); var transitiveVerb = partOfSpeechFactory.Create(); Cache.LangProject.PartsOfSpeechOA.PossibilitiesOS.Add(adjunct); Cache.LangProject.PartsOfSpeechOA.PossibilitiesOS.Add(noun); Cache.LangProject.PartsOfSpeechOA.PossibilitiesOS.Add(verb); verb.SubPossibilitiesOS.Add(transitiveVerb); adjunct.Name.set_String(Cache.DefaultAnalWs, "adjunct"); noun.Name.set_String(Cache.DefaultAnalWs, "noun"); verb.Name.set_String(Cache.DefaultAnalWs, "verb"); transitiveVerb.Name.set_String(Cache.DefaultAnalWs, "transitive verb"); }
public void TestImportCreatesReusesExistingSpeaker() { string xml = "<document><interlinear-text guid=\"AAAAAAAA-AAAA-AAAA-AAAA-AAAAAAAAAAAA\">" + "<item type=\"title\" lang=\"en\">wordspace</item>" + "<item type=\"title-abbreviation\" lang=\"en\">ws</item>" + "<paragraphs><paragraph><phrases><phrase speaker=\"Jimmy Dorante\" media-file=\"FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF\" guid=\"BBBBBBBB-BBBB-BBBB-BBBB-BBBBBBBBBBBB\" begin-time-offset=\"1\" end-time-offset=\"2\">" + "<item type=\"text\">This is a test with text.</item><word>This</word></phrase></phrases></paragraph></paragraphs>" + "<languages><language lang=\"en\" font=\"latin\" vernacular=\"false\"/></languages>" + "<media-files offset-type=\"milliseconds\"><media guid=\"FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF\" location=\"file:\\\\test.wav\"/></media-files></interlinear-text></document>"; ICmPerson newPerson = null; //Create and add Jimmy to the project. NonUndoableUnitOfWorkHelper.Do(Cache.ActionHandlerAccessor, () => { Cache.LanguageProject.PeopleOA = Cache.ServiceLocator.GetInstance <ICmPossibilityListFactory>().Create(); //person not found create one and add it. newPerson = Cache.ServiceLocator.GetInstance <ICmPersonFactory>().Create(); Cache.LanguageProject.PeopleOA.PossibilitiesOS.Add(newPerson); newPerson.Name.set_String(Cache.DefaultVernWs, "Jimmy Dorante"); }); Assert.NotNull(newPerson); LinguaLinksImport li = new LinguaLinksImport(Cache, null, null); FDO.IText text = null; using (var stream = new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray()))) { li.ImportInterlinear(new DummyProgressDlg(), stream, 0, ref text); //If the import sets the speaker in the segment to our Jimmy, and not a new Jimmy then all is well Assert.AreEqual(newPerson, text.ContentsOA[0].SegmentsOS[0].SpeakerRA, "Speaker not reused."); } }
public void OneOfEachElementTypeTest()
{
	string title = "atrocious";
	string abbr = "atroc";
	// an interlinear text example xml string
	string xml = "<document><interlinear-text>" +
		"<item type=\"title\" lang=\"en\">" + title + "</item>" +
		"<item type=\"title-abbreviation\" lang=\"en\">" + abbr + "</item>" +
		"<paragraphs><paragraph><phrases><phrase>" +
		"<item type=\"reference-number\" lang=\"en\">1 Musical</item>" +
		"<item type=\"note\" lang=\"pt\">origem: mary poppins</item>" +
		"<words><word><item type=\"txt\" lang=\"en\">supercalifragilisticexpialidocious</item>" +
		"<item type=\"gls\" lang=\"pt\">absurdo</item></word>" +
		"</words></phrase></phrases></paragraph></paragraphs></interlinear-text></document>";
	LinguaLinksImport li = new LinguaLinksImport(Cache, null, null);
	FDO.IText text = null;
	using (var stream = new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray())))
	{
		li.ImportInterlinear(new DummyProgressDlg(), stream, 0, ref text);
		using (var firstEntry = Cache.LanguageProject.Texts.GetEnumerator())
		{
			firstEntry.MoveNext();
			var imported = firstEntry.Current;
			// The title imported
			Assert.True(imported.Name.get_String(Cache.WritingSystemFactory.get_Engine("en").Handle).Text.Equals(title));
			// The title abbreviation imported
			Assert.True(imported.Abbreviation.get_String(Cache.WritingSystemFactory.get_Engine("en").Handle).Text.Equals(abbr));
		}
	}
}
/// <summary>
/// non-undoable task
/// </summary>
private void DoSetupFixture()
{
	// setup default vernacular ws.
	IWritingSystem wsXkal = Cache.ServiceLocator.WritingSystemManager.Set("qaa-x-kal");
	wsXkal.DefaultFontName = "Times New Roman";
	Cache.ServiceLocator.WritingSystems.VernacularWritingSystems.Add(wsXkal);
	Cache.ServiceLocator.WritingSystems.CurrentVernacularWritingSystems.Insert(0, wsXkal);
	var textFactory = Cache.ServiceLocator.GetInstance<ITextFactory>();
	var stTextFactory = Cache.ServiceLocator.GetInstance<IStTextFactory>();
	m_text0 = textFactory.Create();
	//Cache.LangProject.TextsOC.Add(m_text0);
	m_stText0 = stTextFactory.Create();
	m_text0.ContentsOA = m_stText0;
	m_para0_0 = m_stText0.AddNewTextPara(null);
	m_para0_0.Contents = TsStringUtils.MakeTss("Xxxhope xxxthis xxxwill xxxdo. xxxI xxxhope.", wsXkal.Handle);
	InterlinMaster.LoadParagraphAnnotationsAndGenerateEntryGuessesIfNeeded(m_stText0, false);
	// paragraph 0_0 simply has wordforms as analyses
	foreach (var occurrence in SegmentServices.GetAnalysisOccurrences(m_para0_0))
	{
		if (occurrence.HasWordform)
		{
			m_analysis_para0_0.Add(new AnalysisTree(occurrence.Analysis));
		}
	}
}
public DiscourseTestHelper(FdoCache cache) { m_cache = cache; #region Load Factories and Repositories m_servLoc = m_cache.ServiceLocator; m_tsf = cache.TsStrFactory; m_wAnalysisFact = m_servLoc.GetInstance <IWfiAnalysisFactory>(); m_wGlossFact = m_servLoc.GetInstance <IWfiGlossFactory>(); m_rowFact = m_servLoc.GetInstance <IConstChartRowFactory>(); m_wordGrpFact = m_servLoc.GetInstance <IConstChartWordGroupFactory>(); m_ccTagFact = m_servLoc.GetInstance <IConstChartTagFactory>(); m_mtmFact = m_servLoc.GetInstance <IConstChartMovedTextMarkerFactory>(); m_clauseMrkrFact = m_servLoc.GetInstance <IConstChartClauseMarkerFactory>(); m_partRepo = m_servLoc.GetInstance <IConstituentChartCellPartRepository>(); m_rowRepo = m_servLoc.GetInstance <IConstChartRowRepository>(); #endregion m_text = m_servLoc.GetInstance <ITextFactory>().Create(); //Cache.LangProject.TextsOC.Add(m_text); m_stText = m_servLoc.GetInstance <IStTextFactory>().Create(); m_text.ContentsOA = m_stText; m_allOccurrences = new Dictionary <IStTxtPara, AnalysisOccurrence[]>(); m_firstPara = MakeParagraph(); }
public void TestImportFullELANData() { string xml = "<document><interlinear-text guid=\"AAAAAAAA-AAAA-AAAA-AAAA-AAAAAAAAAAAA\">" + "<item type=\"title\" lang=\"en\">wordspace</item>" + "<item type=\"title-abbreviation\" lang=\"en\">ws</item>" + "<paragraphs><paragraph><phrases><phrase media-file=\"FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF\" guid=\"BBBBBBBB-BBBB-BBBB-BBBB-BBBBBBBBBBBB\" begin-time-offset=\"1\" end-time-offset=\"2\">" + "<item type=\"text\">This is a test with text.</item><word>This</word></phrase></phrases></paragraph></paragraphs>" + "<languages><language lang=\"en\" font=\"latin\" vernacular=\"false\"/></languages>" + "<media-files offset-type=\"milliseconds\"><media guid=\"FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF\" location=\"file:\\\\test.wav\"/></media-files></interlinear-text></document>"; LinguaLinksImport li = new LinguaLinksImport(Cache, null, null); FDO.IText text = null; using (var stream = new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray()))) { li.ImportInterlinear(new DummyProgressDlg(), stream, 0, ref text); using (var firstEntry = Cache.LanguageProject.Texts.GetEnumerator()) { firstEntry.MoveNext(); var imported = firstEntry.Current; Assert.True(imported.ContentsOA.ParagraphsOS.Count > 0, "Paragraph was not imported as text content."); var para = imported.ContentsOA[0]; Assert.NotNull(para, "The imported paragraph is null?"); Assert.True(para.SegmentsOS[0].Guid.ToString().ToUpper().Equals("BBBBBBBB-BBBB-BBBB-BBBB-BBBBBBBBBBBB"), "Segment guid not maintained on import."); VerifyMediaLink(imported); } } }
public void TestProvidedTextUsedIfPresent() { string xml = "<document><interlinear-text>" + "<item type=\"title\" lang=\"en\">wordspace</item>" + "<item type=\"title-abbreviation\" lang=\"en\">ws</item>" + "<paragraphs><paragraph><phrases><phrase>" + "<item type=\"txt\" lang=\"en\">Text not built from words.</item>" + "<item type=\"note\" lang=\"pt\">origem: mary poppins</item>" + "<words><word><item type=\"txt\" lang=\"en\">a</item></word>" + "<word><item type=\"txt\" lang=\"en\">space</item></word>" + "<word><item type=\"txt\" lang=\"en\">space</item></word>" + "</words></phrase></phrases></paragraph></paragraphs></interlinear-text></document>"; LinguaLinksImport li = new LinguaLinksImport(Cache, null, null); FDO.IText text = null; using (var stream = new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray()))) { li.ImportInterlinear(new DummyProgressDlg(), stream, 0, ref text); using (var firstEntry = Cache.LanguageProject.Texts.GetEnumerator()) { firstEntry.MoveNext(); var imported = firstEntry.Current; var para = imported.ContentsOA[0]; Assert.IsTrue(para.Contents.Text.Equals("Text not built from words.")); } } }
public void TestImportMergeInELANData() { string firstxml = "<document><interlinear-text guid=\"AAAAAAAA-AAAA-AAAA-AAAA-AAAAAAAAAAAA\"><item type=\"title\" lang=\"en\">wordspace</item>" + "<item type=\"title-abbreviation\" lang=\"en\">ws</item><paragraphs><paragraph><phrases><phrase guid=\"BBBBBBBB-BBBB-BBBB-BBBB-BBBBBBBBBBBB\">" + "<word guid=\"CCCCCCCC-CCCC-CCCC-CCCC-CCCCCCCCCCCC\"><item type=\"txt\" lang=\"fr\">Word</item></word></phrase></phrases></paragraph></paragraphs></interlinear-text></document>"; string secondxml = "<document><interlinear-text guid=\"AAAAAAAA-AAAA-AAAA-AAAA-AAAAAAAAAAAA\"><item type=\"title\" lang=\"en\">wordspace</item>" + "<item type=\"title-abbreviation\" lang=\"en\">ws</item><paragraphs><paragraph><phrases><phrase guid=\"BBBBBBBB-BBBB-BBBB-BBBB-BBBBBBBBBBBB\" media-file=\"FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF\" begin-time-offset=\"1\" end-time-offset=\"2\">" + "<word guid=\"CCCCCCCC-CCCC-CCCC-CCCC-CCCCCCCCCCCC\"><item type=\"txt\" lang=\"fr\">Word</item></word></phrase></phrases></paragraph></paragraphs>" + "<media-files offset-type=\"milliseconds\"><media guid=\"FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF\" location=\"file:\\\\test.wav\"/></media-files></interlinear-text></document>"; LinguaLinksImport li = new LLIMergeExtension(Cache, null, null); FDO.IText text = null; using (var firstStream = new MemoryStream(Encoding.ASCII.GetBytes(firstxml.ToCharArray()))) { li.ImportInterlinear(new DummyProgressDlg(), firstStream, 0, ref text); Assert.True(text.Guid.ToString().ToUpper().Equals("AAAAAAAA-AAAA-AAAA-AAAA-AAAAAAAAAAAA"), "Guid not maintained during import."); using (var secondStream = new MemoryStream(Encoding.ASCII.GetBytes(secondxml.ToCharArray()))) { li.ImportInterlinear(new DummyProgressDlg(), secondStream, 0, ref text); Assert.True(text.Guid.ToString().ToUpper().Equals("AAAAAAAA-AAAA-AAAA-AAAA-AAAAAAAAAAAA"), "Guid not maintained during import."); Assert.True(Cache.LanguageProject.Texts.Count == 1, "Second text not merged with the first."); Assert.True(text.ContentsOA.ParagraphsOS.Count == 1 && text.ContentsOA[0].SegmentsOS.Count == 1, "Segments from second import not merged with the first."); VerifyMediaLink(text); } } }
internal static AnalysisOccurrence GetNewAnalysisOccurence(FDO.IText text, int iPara, int iSeg, int iSegForm) { IStTxtPara para = text.ContentsOA.ParagraphsOS[iPara] as IStTxtPara; var seg = para.SegmentsOS[iSeg]; return(new AnalysisOccurrence(seg, iSegForm)); }
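// Illustrative usage sketch (mirrors the call in SandBoxWithGlossConfig_LoadsGuessForGlossFromAnalysis
// below; 'text' and 'Cache' are the fixture members used there): fetch the occurrence for the first
// word of the first segment of the first paragraph, then attach a new analysis to it.
var cba0_0 = AddWordsToLexiconTests.GetNewAnalysisOccurence(text, 0, 0, 0);
var wf = Cache.ServiceLocator.GetInstance<IWfiWordformFactory>().Create(TsStringUtils.MakeTss("xxxa", Cache.DefaultVernWs));
cba0_0.Analysis = Cache.ServiceLocator.GetInstance<IWfiAnalysisFactory>().Create(wf, Cache.ServiceLocator.GetInstance<IWfiGlossFactory>());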
public void SandBoxWithGlossConfig_LoadsGuessForGlossFromAnalysis() { var mockRb = MockRepository.GenerateMock <IVwRootBox>(); mockRb.Expect(rb => rb.DataAccess).Return(Cache.MainCacheAccessor); var textFactory = Cache.ServiceLocator.GetInstance <ITextFactory>(); var stTextFactory = Cache.ServiceLocator.GetInstance <IStTextFactory>(); text = textFactory.Create(); var stText1 = stTextFactory.Create(); text.ContentsOA = stText1; var para1 = stText1.AddNewTextPara(null); (text.ContentsOA[0]).Contents = TsStringUtils.MakeTss("xxxa xxxa xxxa.", Cache.DefaultVernWs); InterlinMaster.LoadParagraphAnnotationsAndGenerateEntryGuessesIfNeeded(stText1, true); using (var mockInterlinDocForAnalyis = new MockInterlinDocForAnalyis(stText1) { MockedRootBox = mockRb }) { m_sandbox.SetInterlinDocForTest(mockInterlinDocForAnalyis); var cba0_0 = AddWordsToLexiconTests.GetNewAnalysisOccurence(text, 0, 0, 0); var wf = Cache.ServiceLocator.GetInstance <IWfiWordformFactory>().Create(TsStringUtils.MakeTss("xxxa", Cache.DefaultVernWs)); cba0_0.Analysis = Cache.ServiceLocator.GetInstance <IWfiAnalysisFactory>().Create(wf, Cache.ServiceLocator.GetInstance <IWfiGlossFactory>()); var gloss = cba0_0.Analysis.Analysis.MeaningsOC.First(); var glossTss = TsStringUtils.MakeTss("I did it", Cache.DefaultAnalWs); gloss.Form.set_String(Cache.DefaultAnalWs, glossTss); m_sandbox.SwitchWord(cba0_0); // Verify that the wordgloss was loaded into the m_sandbox Assert.AreNotEqual(0, m_sandbox.WordGlossHvo, "The gloss was not set to Default gloss from the analysis."); Assert.AreEqual(m_sandbox.WordGlossHvo, gloss.Hvo, "The gloss was not set to Default gloss from the analysis."); } }
public override void Initialize() { CheckDisposed(); base.Initialize(); m_text1 = SetupDataForText1(); m_choices = new InterlinLineChoices(0, Cache.DefaultAnalWs, Cache.LangProject); }
public override void Exit() { CheckDisposed(); m_text1 = null; m_choices = null; base.Exit(); }
internal void MakeTextAndCcl() { using (new UndoRedoTaskHelper(Cache, "ConstituentChartDatabaseTests - MakeText()", "ConstituentChartDatabaseTests - MakeText()")) { m_text1 = Cache.LangProject.TextsOC.Add(new Text()); m_text1.ContentsOA = new StText(); m_ccl = new TestCCLogic(Cache, m_chart, m_text1.ContentsOAHvo); MakeTemplate(); } }
public void TestSpacesAroundSpanishPunct()
{
	string xml = "<document><interlinear-text>" +
		"<item type=\"title\" lang=\"en\">wordspace</item>" +
		"<item type=\"title-abbreviation\" lang=\"en\">ws</item>" +
		"<paragraphs><paragraph><phrases><phrase>" +
		"<item type=\"reference-number\" lang=\"en\">1 Musical</item>" +
		"<item type=\"note\" lang=\"pt\">origem: mary poppins</item>" +
		"<words>" +
		"<word><item type=\"txt\" lang=\"en\">s</item></word>" +
		"<word><item type=\"punct\" lang=\"en\">.¡</item></word>" + // spanish begin exclamation
		"<word><item type=\"txt\" lang=\"en\">s</item></word>" +
		"<word><item type=\"punct\" lang=\"en\">!¿</item></word>" + // spanish begin question
		"<word><item type=\"txt\" lang=\"en\">s</item></word>" +
		"<word><item type=\"punct\" lang=\"en\">?</item></word>" +
		"<word><item type=\"txt\" lang=\"en\">s</item></word>" +
		"<word><item type=\"punct\" lang=\"en\">. ¿</item></word>" +
		"<word><item type=\"txt\" lang=\"en\">s</item></word>" +
		"<word><item type=\"punct\" lang=\"en\">? \"¿</item></word>" +
		"<word><item type=\"txt\" lang=\"en\">s</item></word>" +
		"<word><item type=\"punct\" lang=\"en\">¿</item></word>" +
		"<word><item type=\"txt\" lang=\"en\">s</item></word>" +
		"</words></phrase></phrases></paragraph></paragraphs></interlinear-text></document>";
	// --------------------------
	// 01234567890123456789012345
	// s. es! qs? s. qs? "qs qs   (e=spanish beg. exclamation, q=spanish beg. question)
	// --------------------------
	LinguaLinksImport li = new LinguaLinksImport(Cache, null, null);
	FDO.IText text = null;
	using (var stream = new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray())))
	{
		li.ImportInterlinear(new DummyProgressDlg(), stream, 0, ref text);
		using (var firstEntry = Cache.LanguageProject.Texts.GetEnumerator())
		{
			firstEntry.MoveNext();
			var imported = firstEntry.Current; // why is this null?!
			var para = imported.ContentsOA[0];
			var spaceArray = new char[] { ' ' };
			var spaceOne = para.Contents.Text.Substring(2, 1); //should be: " "
			var spaceTwo = para.Contents.Text.Substring(6, 1); //should be: " "
			var spaceThree = para.Contents.Text.Substring(10, 1);
			var spaceFour = para.Contents.Text.Substring(13, 1);
			var spaceFive = para.Contents.Text.Substring(17, 1);
			var spaceSix = para.Contents.Text.Substring(21, 1);
			// test to make sure no space was inserted before the comma; this is probably captured by the other assert
			Assert.AreEqual(7, para.Contents.Text.Split(spaceArray).Length); // capture correct number of spaces, and no double spaces
			// test to make sure spaces were inserted in each expected place
			CollectionAssert.AreEqual(new[] { " ", " ", " ", " ", " ", " " },
				new[] { spaceOne, spaceTwo, spaceThree, spaceFour, spaceFive, spaceSix });
		}
	}
}
private ITextTag MakeTag(FDO.IText text, ICmPossibility tag, ISegment beginSeg, int begin, ISegment endSeg, int end) { ITextTag ttag = Cache.ServiceLocator.GetInstance <ITextTagFactory>().Create(); text.ContentsOA.TagsOC.Add(ttag); ttag.TagRA = tag; ttag.BeginSegmentRA = beginSeg; ttag.BeginAnalysisIndex = begin; ttag.EndSegmentRA = endSeg; ttag.EndAnalysisIndex = end; return(ttag); }
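// Illustrative usage sketch (mirrors the call in CreateTestData later in this section): tag
// analyses 1 through 3 of the first segment of the text as a Noun Phrase possibility (m_np).
var para = (IStTxtPara)m_text.ContentsOA.ParagraphsOS.First();
MakeTag(m_text, m_np, para.SegmentsOS.First(), 1, para.SegmentsOS.First(), 3);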
public override void Exit() { CheckDisposed(); // UndoEverything before we clear our wordform table, so we can make sure // the real wordform list is what we want to start with the next time. base.Exit(); // clear the wordform table. m_text1.Cache.LangProject.WordformInventoryOA.ResetAllWordformOccurrences(); m_text1 = null; // Dispose the sandbox m_sandbox.Dispose(); m_sandbox = null; }
private static void VerifyMediaLink(FDO.IText imported) { var mediaFilesContainer = imported.MediaFilesOA; var para = imported.ContentsOA[0]; Assert.NotNull(mediaFilesContainer, "Media Files not being imported."); Assert.True(mediaFilesContainer.MediaURIsOC.Count == 1, "Media file not imported."); using (var enumerator = para.SegmentsOS.GetEnumerator()) { enumerator.MoveNext(); var seg = enumerator.Current; Assert.True(seg.BeginTimeOffset.Equals("1"), "Begin offset not imported correctly"); Assert.True(seg.EndTimeOffset.Equals("2"), "End offset not imported correctly"); Assert.True(seg.MediaURIRA == mediaFilesContainer.MediaURIsOC.First(), "Media not correctly linked to segment."); } }
public override void Initialize() { CheckDisposed(); base.Initialize(); m_text1 = Cache.LangProject.TextsOC.Add(new Text()); m_text1.ContentsOA = new StText(); m_text1.ContentsOA.ParagraphsOS.Append(new StTxtPara()); (m_text1.ContentsOA.ParagraphsOS[0] as StTxtPara).Contents.UnderlyingTsString = StringUtils.MakeTss("xxxa xxxb xxxc xxxd xxxe, xxxa xxxb.", Cache.DefaultVernWs); bool fDidParse; ParagraphParser.ParseText(m_text1.ContentsOA, new NullProgressState(), out fDidParse); InterlinLineChoices lineChoices = InterlinLineChoices.DefaultChoices(0, Cache.DefaultAnalWs, Cache.LangProject, InterlinLineChoices.InterlinMode.GlossAddWordsToLexicon); m_sandbox = new SandboxForTests(Cache, lineChoices); }
public void SetupTexts() { // First make a regular text. using (new UndoRedoTaskHelper(Cache, "ConstituentChartDatabaseTests - SetupTexts()", "ConstituentChartDatabaseTests - SetupTexts()")) { m_text1 = Cache.LangProject.TextsOC.Add(new Text()); m_text1.ContentsOA = new StText(); StTxtPara para0 = new StTxtPara(); m_text1.ContentsOA.ParagraphsOS.Append(para0); //m_text1.ContentsOA.ParagraphsOS.Append(para1); // 1 2 3 4 5 6 // 0123456789012345678901234567890123456789012345678901234567890123456789 // this is the first paragraph for our constituent chart database tests. para0.Contents.UnderlyingTsString = m_tsf.MakeString("this is the first paragraph for our constituent chart database tests.", Cache.DefaultVernWs); } }
public void ImportParatextExportBasic() { string path = Path.Combine(DirectoryFinder.FwSourceDirectory, @"LexText/Interlinear/ITextDllTests/FlexTextImport"); string file = Path.Combine(path, "FlexTextExportOutput.flextext"); using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read)) { LinguaLinksImport li = new LinguaLinksImport(Cache, null, null); FDO.IText text = null; var options = new LinguaLinksImport.ImportInterlinearOptions { Progress = new DummyProgressDlg(), BirdData = fileStream, AllottedProgress = 0, CheckAndAddLanguages = DummyCheckAndAddLanguagesInternal }; bool result = li.ImportInterlinear(options, ref text); Assert.True(result, "ImportInterlinear was not successful."); } }
private void GetTextProps() { IStText txt = CmObject.CreateFromDBObject(m_cache, m_hvoRoot) as IStText; if (txt != null) { int hvoOwner = txt.OwnerHVO; m_text = CmObject.CreateFromDBObject(m_cache, hvoOwner) as FDO.IText; if (m_text != null) { m_tssTextName = m_text.Name.BestVernacularAnalysisAlternative; m_tssTextAbbreviation = m_text.Abbreviation.BestVernacularAnalysisAlternative; } else if (SIL.FieldWorks.FDO.Scripture.Scripture.IsResponsibleFor(txt as SIL.FieldWorks.FDO.Cellar.StText)) { m_tssTextName = txt.ShortNameTSS; // sorry, no abbreviation... } } }
/// ------------------------------------------------------------------------------------ /// <summary> /// Executes in two distinct scenarios. /// /// 1. If disposing is true, the method has been called directly /// or indirectly by a user's code via the Dispose method. /// Both managed and unmanaged resources can be disposed. /// /// 2. If disposing is false, the method has been called by the /// runtime from inside the finalizer and you should not reference (access) /// other managed objects, as they already have been garbage collected. /// Only unmanaged resources can be disposed. /// </summary> /// <param name="disposing"></param> /// <remarks> /// If any exceptions are thrown, that is fine. /// If the method is being done in a finalizer, it will be ignored. /// If it is thrown by client code calling Dispose, /// it needs to be handled by fixing the bug. /// /// If subclasses override this method, they should call the base implementation. /// </remarks> /// ------------------------------------------------------------------------------------ protected override void Dispose(bool disposing) { //Debug.WriteLineIf(!disposing, "****************** " + GetType().Name + " 'disposing' is false. ******************"); // Must not be run more than once. if (IsDisposed) return; if (disposing) { // Dispose managed resources here. if (m_sandbox != null) m_sandbox.Dispose(); } // Dispose unmanaged resources here, whether disposing is true or false. m_text1 = null; base.Dispose(disposing); }
protected override void CreateTestData() { m_noun = MakePartOfSpeech("noun"); m_verb = MakePartOfSpeech("verb"); m_adj = MakePartOfSpeech("adj"); IFsFeatureSystem msFeatSys = Cache.LanguageProject.MsFeatureSystemOA; m_inflType = AddFSType(msFeatSys, "infl", AddComplexFeature(msFeatSys, "nounAgr", AddClosedFeature(msFeatSys, "num", "sg", "pl")), AddClosedFeature(msFeatSys, "tense", "pres")); m_np = Cache.LangProject.GetDefaultTextTagList().ReallyReallyAllPossibilities.Single(poss => poss.Abbreviation.BestAnalysisAlternative.Text == "Noun Phrase"); ILexEntry ni = MakeEntry("ni-", m_verb, "1SgSubj"); ILexEntry him = MakeEntry("him-", m_verb, "3SgObj"); ILexEntry bili = MakeEntry("bili", m_verb, "to see"); ILexEntry ra = MakeEntry("-ra", m_verb, "Pres", new FS { { GetFeature("tense"), GetValue("pres") } }); ILexEntry pus = MakeEntry("pus", m_adj, "green"); ILexEntry yalo = MakeEntry("yalo", m_noun, "mat"); ILexEntry la = MakeEntry("-la", m_noun, "1SgPoss", new FS { { GetFeature("nounAgr"), new FS { { GetFeature("num"), GetValue("sg") } } } }); MakeWordform("nihimbilira", "I see", m_verb, ni, him, bili, ra); MakeWordform("pus", "green", m_adj, pus); MakeWordform("yalola", "my mat", m_noun, yalo, la); MakeWordform("ban", "test", MakePartOfSpeech("pos")); m_text = MakeText("nihimbilira pus, yalola ban."); var para = (IStTxtPara)m_text.ContentsOA.ParagraphsOS.First(); MakeTag(m_text, m_np, para.SegmentsOS.First(), 1, para.SegmentsOS.First(), 3); }
/// <summary> /// non-undoable task /// </summary> private void DoSetupFixture() { // setup default vernacular ws. m_wsXkal = Cache.ServiceLocator.WritingSystemManager.Set("qaa-x-kal"); m_wsXkal.DefaultFontName = "Times New Roman"; Cache.ServiceLocator.WritingSystems.VernacularWritingSystems.Add(m_wsXkal); Cache.ServiceLocator.WritingSystems.CurrentVernacularWritingSystems.Insert(0, m_wsXkal); m_textsDefn = new XmlDocument(); m_tagRepo = Cache.ServiceLocator.GetInstance <ITextTagRepository>(); ConfigurationFilePath("Language Explorer/Configuration/Words/AreaConfiguration.xml"); m_text1 = LoadTestText("FDO/FDOTests/TestData/ParagraphParserTestTexts.xml", 1, m_textsDefn); m_para1 = m_text1.ContentsOA.ParagraphsOS[0] as IStTxtPara; ParseTestText(); m_tagChild = new TestTaggingChild(Cache); m_tagChild.SetText(m_text1.ContentsOA); // This could change, but at least it gives a reasonably stable list to test from. m_textMarkupTags = Cache.LangProject.GetDefaultTextTagList(); LoadTagListPossibilities(); }
public void TestImportCreatesPersonForSpeaker() { string xml = "<document><interlinear-text guid=\"AAAAAAAA-AAAA-AAAA-AAAA-AAAAAAAAAAAA\">" + "<item type=\"title\" lang=\"en\">wordspace</item>" + "<item type=\"title-abbreviation\" lang=\"en\">ws</item>" + "<paragraphs><paragraph><phrases><phrase speaker=\"Jimmy Dorante\" media-file=\"FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF\" guid=\"BBBBBBBB-BBBB-BBBB-BBBB-BBBBBBBBBBBB\" begin-time-offset=\"1\" end-time-offset=\"2\">" + "<item type=\"text\">This is a test with text.</item><word>This</word></phrase></phrases></paragraph></paragraphs>" + "<languages><language lang=\"en\" font=\"latin\" vernacular=\"false\"/></languages>" + "<media-files offset-type=\"milliseconds\"><media guid=\"FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF\" location=\"file:\\\\test.wav\"/></media-files></interlinear-text></document>"; LinguaLinksImport li = new LinguaLinksImport(Cache, null, null); FDO.IText text = null; using (var stream = new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray()))) { li.ImportInterlinear(new DummyProgressDlg(), stream, 0, ref text); Assert.AreEqual("Jimmy Dorante", (Cache.LanguageProject.PeopleOA.PossibilitiesOS[0] as ICmPerson).Name.get_String(Cache.DefaultVernWs).Text, "Speaker was not created during the import."); } }
/// <summary> /// Do the conversion. The signature of this method is required for use with ProgressDialogWithTask.RunTask, /// but the parameters and return result are not actually used. /// </summary> private object DoConversion(IThreadedProgress dlg, object[] parameters) { m_firstNewText = null; foreach (var path1 in m_fileListBox.Text.Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries)) { var path = path1.Trim(); if (!File.Exists(path)) { continue; // report? } var input = new ByteReader(path); var converterStage1 = GetSfmConverter(); var stage1 = converterStage1.Convert(input, m_mappings, m_cache.WritingSystemFactory); // Skip actual import if SHIFT was held down. if (secretShiftText.Visible == true) { continue; } DoStage2Conversion(stage1, dlg); } return(null); }
/// ------------------------------------------------------------------------------------ /// <summary> /// Executes in two distinct scenarios. /// /// 1. If disposing is true, the method has been called directly /// or indirectly by a user's code via the Dispose method. /// Both managed and unmanaged resources can be disposed. /// /// 2. If disposing is false, the method has been called by the /// runtime from inside the finalizer and you should not reference (access) /// other managed objects, as they already have been garbage collected. /// Only unmanaged resources can be disposed. /// </summary> /// <param name="disposing"></param> /// <remarks> /// If any exceptions are thrown, that is fine. /// If the method is being done in a finalizer, it will be ignored. /// If it is thrown by client code calling Dispose, /// it needs to be handled by fixing the bug. /// /// If subclasses override this method, they should call the base implementation. /// </remarks> /// ------------------------------------------------------------------------------------ protected override void Dispose(bool disposing) { //Debug.WriteLineIf(!disposing, "****************** " + GetType().Name + " 'disposing' is false. ******************"); // Must not be run more than once. if (IsDisposed) { return; } if (disposing) { // Dispose managed resources here. if (m_vc != null) { m_vc.Dispose(); } if (m_writer != null) { m_writer.Close(); } if (m_stream != null) { m_stream.Close(); } } // Dispose unmanaged resources here, whether disposing is true or false. m_vc = null; m_textsDefn = null; m_text1 = null; m_exportedXml = null; m_writer = null; m_exporter = null; m_choices = null; m_stream = null; base.Dispose(disposing); }
public void TestEmptyParagraph() { string xml = "<document><interlinear-text>" + "<item type=\"title\" lang=\"en\">wordspace</item>" + "<item type=\"title-abbreviation\" lang=\"en\">ws</item>" + "<paragraphs><paragraph/></paragraphs></interlinear-text></document>"; LinguaLinksImport li = new LinguaLinksImport(Cache, null, null); FDO.IText text = null; using (var stream = new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray()))) { li.ImportInterlinear(new DummyProgressDlg(), stream, 0, ref text); using (var firstEntry = Cache.LanguageProject.Texts.GetEnumerator()) { firstEntry.MoveNext(); var imported = firstEntry.Current; Assert.True(imported.ContentsOA.ParagraphsOS.Count > 0, "Empty paragraph was not imported as text content."); var para = imported.ContentsOA[0]; Assert.NotNull(para, "The imported paragraph is null?"); } } }
public void TestSpacesBetweenWords() { string xml = "<document><interlinear-text>" + "<item type=\"title\" lang=\"en\">wordspace</item>" + "<item type=\"title-abbreviation\" lang=\"en\">ws</item>" + "<paragraphs><paragraph><phrases><phrase>" + "<item type=\"reference-number\" lang=\"en\">1 Musical</item>" + "<item type=\"note\" lang=\"pt\">origem: mary poppins</item>" + "<words><word><item type=\"txt\" lang=\"en\">a</item></word>" + "<word><item type=\"txt\" lang=\"en\">space</item></word>" + "<word><item type=\"txt\" lang=\"en\">space</item></word>" + "</words></phrase></phrases></paragraph></paragraphs></interlinear-text></document>"; LinguaLinksImport li = new LinguaLinksImport(Cache, null, null); FDO.IText text = null; using (var stream = new MemoryStream(Encoding.ASCII.GetBytes(xml.ToCharArray()))) { li.ImportInterlinear(new DummyProgressDlg(), stream, 0, ref text); using (var firstEntry = Cache.LanguageProject.Texts.GetEnumerator()) { firstEntry.MoveNext(); var imported = firstEntry.Current; var para = imported.ContentsOA[0]; var spaceOne = para.Contents.Text.Substring(1, 1); //should be: " " var wordAfter = para.Contents.Text.Substring(2, 5); //should be: "space" var spaceTwo = para.Contents.Text.Substring(7, 1); //should be: " " //test to make sure no space was inserted before the first word. Assert.IsFalse(" ".Equals(para.Contents.GetSubstring(0, 1))); //test to make sure spaces were inserted between "a" and "space", and between "space" and "space" //any extra spaces would result in the "space" word looking like " spac" Assert.IsTrue(spaceOne.Equals(spaceTwo)); Assert.IsTrue(wordAfter.Equals("space")); } } }
/// <summary> /// Do the conversion. The signature of this method is required for use with ProgressDialogWithTask.RunTask, /// but the parameters and return result are not actually used. /// </summary> private object DoConversion(IThreadedProgress dlg, object[] parameters) { m_firstNewText = null; foreach (var path1 in InputFiles) { var path = path1.Trim(); if (!File.Exists(path)) continue; // report? var input = new ByteReader(path); var converterStage1 = GetSfmConverter(); var stage1 = converterStage1.Convert(input, m_mappings, m_cache.WritingSystemFactory); // Skip actual import if SHIFT was held down. if (secretShiftText.Visible == true) continue; DoStage2Conversion(stage1, dlg); } return null; }
/// <summary>
/// This method will create a new Text document from the given BIRD format Interlineartext.
/// </summary>
/// <param name="newText">The text to populate, could be set to null.</param>
/// <param name="textParams"></param>
private static bool PopulateTextFromBIRDDoc(ref FDO.IText newText, TextCreationParams textParams)
{
	s_importOptions = textParams.ImportOptions;
	Interlineartext interlinText = textParams.InterlinText;
	FdoCache cache = textParams.Cache;
	IThreadedProgress progress = textParams.Progress;
	if (s_importOptions.CheckAndAddLanguages == null)
	{
		s_importOptions.CheckAndAddLanguages = CheckAndAddLanguagesInternal;
	}
	ILgWritingSystemFactory wsFactory = cache.WritingSystemFactory;
	char space = ' ';
	// handle the languages (writing systems) section, alerting the user if new writing systems are encountered
	if (!s_importOptions.CheckAndAddLanguages(cache, interlinText, wsFactory, progress))
	{
		return false;
	}
	// handle the header (info or meta) information
	SetTextMetaAndMedia(cache, interlinText, wsFactory, newText);
	// create all the paragraphs
	foreach (var paragraph in interlinText.paragraphs)
	{
		if (newText.ContentsOA == null)
		{
			newText.ContentsOA = cache.ServiceLocator.GetInstance<IStTextFactory>().Create();
		}
		IStTxtPara newTextPara = newText.ContentsOA.AddNewTextPara("");
		int offset = 0;
		if (paragraph.phrases == null)
		{
			continue;
		}
		foreach (var phrase in paragraph.phrases)
		{
			ICmObject oldSegment = null;
			// Try and locate a segment with this Guid.
			if (!String.IsNullOrEmpty(phrase.guid))
			{
				if (cache.ServiceLocator.ObjectRepository.TryGetObject(new Guid(phrase.guid), out oldSegment))
				{
					// We aren't merging, but we have this guid in our system, ignore the file Guid
					oldSegment = cache.ServiceLocator.GetInstance<ISegmentFactory>().Create(newTextPara, offset);
				}
				else
				{
					// The segment is identified by a Guid, but apparently we don't have it in our current document, so make one with the guid
					oldSegment = cache.ServiceLocator.GetInstance<ISegmentFactory>().Create(newTextPara, offset, cache, new Guid(phrase.guid));
				}
			}
			// set newSegment to the old, or create a brand new one.
			ISegment newSegment = oldSegment as ISegment ?? cache.ServiceLocator.GetInstance<ISegmentFactory>().Create(newTextPara, offset);
			var tsStrFactory = cache.ServiceLocator.GetInstance<ITsStrFactory>();
			// Fill in the ELAN time information if it is present.
			AddELANInfoToSegment(cache, phrase, newSegment);
			ITsString phraseText = null;
			bool textInFile = false;
			// Add all of the data from <item> elements into the segment.
			AddSegmentItemData(cache, wsFactory, phrase, newSegment, tsStrFactory, ref textInFile, ref phraseText);
			bool lastWasWord = false;
			if (phrase.WordsContent != null && phrase.WordsContent.Words != null)
			{
				if (textParams.Version == 0 && PhraseHasExactlyOneTxtItemNotAKnownWordform(newSegment.Cache, phrase))
				{
					// It might be a SayMore text that makes the whole segment a single txt item.
					// We want to add the text anyway (unless a higher level did so), but we will skip making
					// a wordform. Eventual parsing of the text will do so.
					if (!textInFile)
					{
						UpdatePhraseTextForWordItems(wsFactory, tsStrFactory, ref phraseText, phrase.WordsContent.Words[0], ref lastWasWord, space);
					}
				}
				else
				{
					foreach (var word in phrase.WordsContent.Words)
					{
						// If the text of the phrase was not given in the document, build it from the words.
						if (!textInFile)
						{
							UpdatePhraseTextForWordItems(wsFactory, tsStrFactory, ref phraseText, word, ref lastWasWord, space);
						}
						AddWordToSegment(newSegment, word, tsStrFactory);
					}
				}
			}
			UpdateParagraphTextForPhrase(newTextPara, ref offset, phraseText);
		}
	}
	return true;
}
private static void SetTextMetaAndMedia(FdoCache cache, Interlineartext interlinText, ILgWritingSystemFactory wsFactory, FDO.IText newText)
{
	if (interlinText.Items != null) // apparently it is null if there are no items.
	{
		foreach (var item in interlinText.Items)
		{
			switch (item.type)
			{
				case "title":
					newText.Name.set_String(GetWsEngine(wsFactory, item.lang).Handle, item.Value);
					break;
				case "title-abbreviation":
					newText.Abbreviation.set_String(GetWsEngine(wsFactory, item.lang).Handle, item.Value);
					break;
				case "source":
					newText.Source.set_String(GetWsEngine(wsFactory, item.lang).Handle, item.Value);
					break;
				case "comment":
					newText.Description.set_String(GetWsEngine(wsFactory, item.lang).Handle, item.Value);
					break;
			}
		}
	}
	// handle the media files section (ELAN initiated); it needs to be processed before the paragraphs,
	// as segments could reference these parts.
	if (interlinText.mediafiles != null)
	{
		var mediaFiles = newText.MediaFilesOA = cache.ServiceLocator.GetInstance<ICmMediaContainerFactory>().Create();
		mediaFiles.OffsetType = interlinText.mediafiles.offsetType;
		foreach (var mediaFile in interlinText.mediafiles.media)
		{
			var media = cache.ServiceLocator.GetInstance<ICmMediaURIFactory>().Create(cache, new Guid(mediaFile.guid));
			mediaFiles.MediaURIsOC.Add(media);
			media.MediaURI = mediaFile.location;
		}
	}
}
public override void Initialize() { CheckDisposed(); base.Initialize(); SetupOldWordformingOverrides(); m_text1 = LoadTestText(@"LexText\Interlinear\ITextDllTests\ParagraphParserTestTexts.xml", 1, m_textsDefn); }
private static bool MergeTextWithBIRDDoc(ref FDO.IText newText, TextCreationParams textParams) { s_importOptions = textParams.ImportOptions; Interlineartext interlinText = textParams.InterlinText; FdoCache cache = textParams.Cache; IThreadedProgress progress = textParams.Progress; if (s_importOptions.CheckAndAddLanguages == null) { s_importOptions.CheckAndAddLanguages = CheckAndAddLanguagesInternal; } ILgWritingSystemFactory wsFactory = cache.WritingSystemFactory; char space = ' '; //handle the languages(writing systems) section alerting the user if new writing systems are encountered if (!s_importOptions.CheckAndAddLanguages(cache, interlinText, wsFactory, progress)) { return(false); } //handle the header(info or meta) information as well as any media-files sections SetTextMetaAndMedia(cache, interlinText, wsFactory, newText); IStText oldContents = newText.ContentsOA; IStText newContents = null; //create all the paragraphs NOTE: Currently the paragraph guids are being ignored, this might be wrong. foreach (var paragraph in interlinText.paragraphs) { if (newContents == null) { newContents = cache.ServiceLocator.GetInstance <IStTextFactory>().Create(); newText.ContentsOA = newContents; } IStTxtPara newTextPara = newContents.AddNewTextPara(""); int offset = 0; if (paragraph.phrases == null) { continue; } foreach (var phrase in paragraph.phrases) { ICmObject oldSegment = null; //Try and locate a segment with this Guid. if (!String.IsNullOrEmpty(phrase.guid)) { if (cache.ServiceLocator.ObjectRepository.TryGetObject(new Guid(phrase.guid), out oldSegment)) { if (oldSegment as ISegment != null) //The segment matches, add it into our paragraph. { newTextPara.SegmentsOS.Add(oldSegment as ISegment); } else if (oldSegment == null) //The segment is identified by a Guid, but apparently we don't have it in our current document, so make one { oldSegment = cache.ServiceLocator.GetInstance <ISegmentFactory>().Create(newTextPara, offset, cache, new Guid(phrase.guid)); } else //The Guid is in use, but not by a segment. This is bad. { return(false); } } } //set newSegment to the old, or create a brand new one. ISegment newSegment = oldSegment as ISegment ?? cache.ServiceLocator.GetInstance <ISegmentFactory>().Create(newTextPara, offset); var tsStrFactory = cache.ServiceLocator.GetInstance <ITsStrFactory>(); //Fill in the ELAN time information if it is present. AddELANInfoToSegment(cache, phrase, newSegment); ITsString phraseText = null; bool textInFile = false; //Add all of the data from <item> elements into the segment. AddSegmentItemData(cache, wsFactory, phrase, newSegment, tsStrFactory, ref textInFile, ref phraseText); bool lastWasWord = false; if (phrase.WordsContent != null && phrase.WordsContent.Words != null) { foreach (var word in phrase.WordsContent.Words) { //If the text of the phrase was not found in a "txt" item for this segment then build it from the words. if (!textInFile) { UpdatePhraseTextForWordItems(wsFactory, tsStrFactory, ref phraseText, word, ref lastWasWord, space); } MergeWordToSegment(newSegment, word, tsStrFactory); } } UpdateParagraphTextForPhrase(newTextPara, ref offset, phraseText); } } return(true); }
public override void Exit() { CheckDisposed(); // UndoEverything before we clear our wordform table, so we can make sure // the real wordform list is what we want to start with the next time. base.Exit(); // clear the wordform table. m_text1.Cache.LangProject.WordformInventoryOA.ResetAllWordformOccurrences(); m_text1 = null; }
internal TextBuilder(TextBuilder tbToClone) : this(tbToClone.m_cache) { m_text = tbToClone.m_text; this.SelectedNode = ParagraphBuilder.Snapshot(tbToClone.SelectedNode); }
public override void FixtureTeardown() { m_textsDefn = null; m_text1 = null; base.FixtureTeardown(); }
/// <summary> /// non-undoable task /// </summary> private void DoSetupFixture() { // Setup default analysis ws m_wsEn = Cache.ServiceLocator.WritingSystemManager.Get("en"); // setup default vernacular ws. m_wsXkal = Cache.ServiceLocator.WritingSystemManager.Set("qaa-x-kal"); m_wsXkal.DefaultFontName = "Times New Roman"; Cache.ServiceLocator.WritingSystems.DefaultVernacularWritingSystem = m_wsXkal; m_text1 = LoadTestText( Path.Combine("FDO", Path.Combine("FDOTests", Path.Combine("TestData", "ParagraphParserTestTexts.xml"))), 1, m_textsDefn); // capture text defn state. m_testFixtureTextsDefn = m_textsDefn; }
/// <summary> /// Creates the text to be used by this TextBuilder /// </summary> /// <param name="fCreateContents"></param> /// <returns></returns> public FDO.IText CreateText(bool fCreateContents) { Debug.Assert(m_text == null); m_text = CreateText(); if (fCreateContents) CreateContents(m_text); return m_text; }
internal FDO.IText BuildText(XmlNode textDefn) { if (textDefn.Name != "Text") return null; m_textDefn = textDefn; // 1. Create the new text and give it an owner. m_text = this.CreateText(); textDefn.Attributes["id"].Value = m_text.Hvo.ToString(); XmlNode name = textDefn.SelectSingleNode("Name5/AUni"); // 2. If we have a name, set it. if (name != null && name.InnerText.Length > 0) { string wsAbbr = XmlUtils.GetOptionalAttributeValue(name, "ws"); int ws = 0; if (wsAbbr != null) ws = m_cache.ServiceLocator.WritingSystemManager.GetWsFromStr(wsAbbr); if (ws == 0) ws = m_cache.DefaultVernWs; this.SetName(name.InnerText, ws); } // 3. Create a body for the text; XmlNode contents = textDefn.SelectSingleNode("Contents5054/StText"); if (contents == null) return m_text; IStText body = this.CreateContents(m_text); // 4. Create each paragraph for the text. XmlNodeList paragraphs = contents.SelectNodes("Paragraphs14/StTxtPara"); if (paragraphs != null) { foreach (XmlNode paraDef in paragraphs) { ParagraphBuilder pb = new ParagraphBuilder(m_cache, m_text.ContentsOA.ParagraphsOS); IStTxtPara realPara = pb.BuildParagraphContent(paraDef); } } return m_text; }
void SetupTexts()
{
	// First make a regular text.
	using (new UndoRedoTaskHelper(Cache, "ConcordanceControlTests - SetupTexts()", "ConcordanceControlTests - SetupTexts()"))
	{
		m_text1 = Cache.LangProject.TextsOC.Add(new Text());
		m_text1.ContentsOA = new StText();
		StTxtPara para0 = new StTxtPara();
		StTxtPara para1 = new StTxtPara();
		m_text1.ContentsOA.ParagraphsOS.Append(para0);
		m_text1.ContentsOA.ParagraphsOS.Append(para1);
		ITsStrFactory tsf = TsStrFactoryClass.Create();
		//          1         2         3         4         5         6
		// 0123456789012345678901234567890123456789012345678901234567890123456789
		// XXXXsecrecyZZZ; XXXsentenceZZZ!!
		// XXXlocoZZZ, XXXsegmentZZZ?? ZZZamazingXXX wonderfulXXXzzzcounselor!!
		para0.Contents.UnderlyingTsString = tsf.MakeString("XXXXsecrecyZZZ; XXXsentenceZZZ!!", Cache.DefaultVernWs);
		para1.Contents.UnderlyingTsString = tsf.MakeString("XXXlocoZZZ, XXXsegmentZZZ?? ZZZamazingXXX wonderfulXXXzzzcounselor!!", Cache.DefaultVernWs);
		// add scripture
		m_newBook1 = new ScrBook();
		Cache.LangProject.TranslatedScriptureOA.ScriptureBooksOS.Append(m_newBook1);
		m_newBook1.TitleOA = new StText();
		m_newBook1.TitleOA.ParagraphsOS.Append(new StTxtPara());
		(m_newBook1.TitleOA.ParagraphsOS[0] as StTxtPara).Contents.UnderlyingTsString = tsf.MakeString("XXXnewBook1zzz.Title", Cache.DefaultVernWs);
		IScrSection newSection1_0 = m_newBook1.SectionsOS.Append(new ScrSection());
		newSection1_0.ContentOA = new StText();
		StTxtPara paraSection1_0 = new StTxtPara();
		(newSection1_0.ContentOA as StText).ParagraphsOS.Append(paraSection1_0);
		paraSection1_0.Contents.UnderlyingTsString = tsf.MakeString("ZZZnewBook1.Section0.Introduction1XXX", Cache.DefaultVernWs);
		IScrSection newSection1_1 = m_newBook1.SectionsOS.Append(new ScrSection());
		newSection1_1.ContentOA = new StText();
		StTxtPara paraSection1_1 = new StTxtPara();
		(newSection1_1.ContentOA as StText).ParagraphsOS.Append(paraSection1_1);
		paraSection1_1.Contents.UnderlyingTsString = tsf.MakeString("XXXnewBook1.Section1.1:1-1:20ZZZ", Cache.DefaultVernWs);
		// section.VerseRefEnd = book.CanonicalNum * 1000000 + 1 * 1000 + (introSection ? 0 : 1);
		newSection1_1.VerseRefEnd = 70 * 1000000 + 1 * 1000 + 1;
		// setup some basic analyses for the texts.
		string formLexEntry = "XXXlexEntry1";
		ITsString tssLexEntryForm = StringUtils.MakeTss(formLexEntry, Cache.DefaultVernWs);
		int clsidForm;
		StringUtils.ReassignTss(ref tssLexEntryForm, StringUtils.MakeTss(formLexEntry, Cache.DefaultVernWs));
		ILexEntry lexEntry1_Entry = LexEntry.CreateEntry(Cache,
			MoMorphType.FindMorphType(Cache, new MoMorphTypeCollection(Cache), ref formLexEntry, out clsidForm),
			tssLexEntryForm, "XXXlexEntry1.sense1", null);
		ILexSense lexEntry1_Sense1 = lexEntry1_Entry.SensesOS[0];
		ILexSense lexEntry1_Sense2 = LexSense.CreateSense(lexEntry1_Entry, null, "XXXlexEntry1.sense2");
		ParagraphAnnotator tapara0 = new ParagraphAnnotator(para0);
		ParagraphAnnotator tapara1 = new ParagraphAnnotator(para1);
		ParagraphAnnotator taSection1_0 = new ParagraphAnnotator(paraSection1_0);
		ParagraphAnnotator taSection1_1 = new ParagraphAnnotator(paraSection1_1);
		// currently setup mono-morphemic search
		ArrayList morphForms = new ArrayList();
		formLexEntry = "XXXXsecrecyZZZ";
		StringUtils.ReassignTss(ref tssLexEntryForm, StringUtils.MakeTss(formLexEntry, Cache.DefaultVernWs));
		ILexEntry lexEntry2_Entry = LexEntry.CreateEntry(Cache,
			MoMorphType.FindMorphType(Cache, new MoMorphTypeCollection(Cache), ref formLexEntry, out clsidForm),
			tssLexEntryForm, "XXXlexEntry2.sense1", null);
		morphForms.Add(lexEntry2_Entry.LexemeFormOA);
		IWfiAnalysis wfiAnalysis = tapara0.BreakIntoMorphs(0, 0, morphForms);
		m_wfXXXXsecrecyZZZ = (wfiAnalysis as WfiAnalysis).Owner as IWfiWordform;
		ILexSense lexEntry2_Sense1 = lexEntry2_Entry.SensesOS[0];
		tapara0.SetMorphSense(0, 0, 0, lexEntry2_Sense1);
		formLexEntry = "XXXsegmentZZZ";
		StringUtils.ReassignTss(ref tssLexEntryForm, StringUtils.MakeTss(formLexEntry, Cache.DefaultVernWs));
		ILexEntry lexEntry3_Entry = LexEntry.CreateEntry(Cache,
			MoMorphType.FindMorphType(Cache, new MoMorphTypeCollection(Cache), ref formLexEntry, out clsidForm),
			tssLexEntryForm, "XXXlexEntry3.sense1", null);
		morphForms[0] = lexEntry3_Entry.LexemeFormOA;
		tapara1.BreakIntoMorphs(0, 2, morphForms);
		ILexSense lexEntry3_Sense1 = lexEntry3_Entry.SensesOS[0];
		tapara1.SetMorphSense(0, 2, 0, lexEntry3_Sense1);
		formLexEntry = "ZZZamazingXXX";
		StringUtils.ReassignTss(ref tssLexEntryForm, StringUtils.MakeTss(formLexEntry, Cache.DefaultVernWs));
		ILexEntry lexEntry4_Entry = LexEntry.CreateEntry(Cache,
			MoMorphType.FindMorphType(Cache, new MoMorphTypeCollection(Cache), ref formLexEntry, out clsidForm),
			tssLexEntryForm, "XXXlexEntry4.sense1", null);
		morphForms[0] = lexEntry4_Entry.LexemeFormOA;
		tapara1.BreakIntoMorphs(1, 0, morphForms);
		ILexSense lexEntry4_Sense1 = lexEntry4_Entry.SensesOS[0];
		tapara1.SetMorphSense(1, 0, 0, lexEntry4_Sense1);
		//XXXlocoZZZ
		morphForms[0] = "XXXlocoZZZ";
		tapara1.BreakIntoMorphs(0, 0, morphForms);
		tapara1.SetMorphSense(0, 0, 0, lexEntry1_Sense2);
		formLexEntry = paraSection1_0.Contents.Text;
		StringUtils.ReassignTss(ref tssLexEntryForm, StringUtils.MakeTss(formLexEntry, Cache.DefaultVernWs));
		ILexEntry lexEntry5_Entry = LexEntry.CreateEntry(Cache,
			MoMorphType.FindMorphType(Cache, new MoMorphTypeCollection(Cache), ref formLexEntry, out clsidForm),
			tssLexEntryForm, "XXXlexEntry5.sense1", null);
		morphForms[0] = lexEntry5_Entry.LexemeFormOA;
		taSection1_0.BreakIntoMorphs(0, 0, morphForms);
		ILexSense lexEntry5_Sense1 = lexEntry5_Entry.SensesOS[0];
		taSection1_0.SetMorphSense(0, 0, 0, lexEntry5_Sense1);
		morphForms[0] = paraSection1_1.Contents.Text;
		taSection1_1.BreakIntoMorphs(0, 0, morphForms); // won't match on LexEntry
		taSection1_1.SetMorphSense(0, 0, 0, lexEntry1_Sense2); // will match on LexGloss
		string gloss;
		tapara0.SetDefaultWordGloss(0, 0, out gloss);
		tapara1.SetDefaultWordGloss(0, 2, out gloss);
		tapara1.SetDefaultWordGloss(1, 0, out gloss);
		taSection1_1.SetDefaultWordGloss(0, 0, out gloss);
		StTxtPara.TwficInfo infoCba0_0_0 = new StTxtPara.TwficInfo(Cache, tapara0.GetSegmentForm(0, 0));
		StTxtPara.TwficInfo infoCba1_0_2 = new StTxtPara.TwficInfo(Cache, tapara1.GetSegmentForm(0, 2));
		StTxtPara.TwficInfo infoCbaScr_0_0 = new StTxtPara.TwficInfo(Cache, taSection1_1.GetSegmentForm(0, 0));
		int segDefn_literalTranslation = Cache.GetIdFromGuid(LangProject.kguidAnnLiteralTranslation);
		int segDefn_freeTranslation = Cache.GetIdFromGuid(LangProject.kguidAnnFreeTranslation);
		int segDefn_note = Cache.GetIdFromGuid(LangProject.kguidAnnNote);
		BaseFreeformAdder ffAdder = new BaseFreeformAdder(Cache);
		ICmIndirectAnnotation freeTrans0 = ffAdder.AddFreeformAnnotation(infoCba0_0_0.SegmentHvo, segDefn_freeTranslation);
		freeTrans0.Comment.SetAlternative("Para0.Segment0: XXXFreeform translation.", Cache.DefaultAnalWs);
		ICmIndirectAnnotation literalTrans0 = ffAdder.AddFreeformAnnotation(infoCba0_0_0.SegmentHvo, segDefn_literalTranslation);
		literalTrans0.Comment.SetAlternative("Para0.Segment0: XXXLiteral translation.", Cache.DefaultAnalWs);
		ICmIndirectAnnotation note0 = ffAdder.AddFreeformAnnotation(infoCba0_0_0.SegmentHvo, segDefn_note);
		note0.Comment.SetAlternative("Para0.Segment0: XXXNote.", Cache.DefaultAnalWs);
		ICmIndirectAnnotation freeTrans1 = ffAdder.AddFreeformAnnotation(infoCba1_0_2.SegmentHvo, segDefn_freeTranslation);
		freeTrans1.Comment.SetAlternative("Para1.Segment0: XXXFreeform translation.", Cache.DefaultAnalWs);
		ICmIndirectAnnotation literalTrans1 = ffAdder.AddFreeformAnnotation(infoCba1_0_2.SegmentHvo, segDefn_literalTranslation);
		literalTrans1.Comment.SetAlternative("Para1.Segment0: XXXLiteral translation.", Cache.DefaultAnalWs);
		ICmIndirectAnnotation note1 = ffAdder.AddFreeformAnnotation(infoCba1_0_2.SegmentHvo, segDefn_note);
		note1.Comment.SetAlternative("Para1.Segment0: XXXNote.", Cache.DefaultAnalWs);
		// Scripture
		ICmIndirectAnnotation freeTransScr1 = ffAdder.AddFreeformAnnotation(infoCbaScr_0_0.SegmentHvo, segDefn_freeTranslation);
		freeTransScr1.Comment.SetAlternative("Scr1.Para0.Segment0: XXXFreeform translation.", Cache.DefaultAnalWs);
		ICmIndirectAnnotation literalTransScr1 = ffAdder.AddFreeformAnnotation(infoCbaScr_0_0.SegmentHvo, segDefn_literalTranslation);
		literalTransScr1.Comment.SetAlternative("Scr1.Para0.Segment0: XXXLiteral translation.", Cache.DefaultAnalWs);
		ICmIndirectAnnotation noteScr1 = ffAdder.AddFreeformAnnotation(infoCbaScr_0_0.SegmentHvo, segDefn_note);
		noteScr1.Comment.SetAlternative("Scr1.Para0.Segment0: XXXNote.", Cache.DefaultAnalWs);
	}
}
/// <summary> /// Import a file which looks like a FieldWorks interlinear XML export. /// </summary> /// <param name="dlg"></param> /// <param name="parameters"></param> /// <returns></returns> public object ImportInterlinear(IThreadedProgress dlg, object[] parameters) { bool retValue = false; Debug.Assert(parameters.Length == 1); using (var stream = new FileStream((string) parameters[0], FileMode.Open, FileAccess.Read)) { FDO.IText firstNewText = null; retValue = ImportInterlinear(dlg, stream, 100, ref firstNewText); FirstNewText = firstNewText; } return retValue; }
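// Illustrative invocation sketch (the file path is a placeholder; this assumes the overload above
// lives on LinguaLinksImport alongside the stream-based overload used in the tests, and that a
// DummyProgressDlg can stand in for the IThreadedProgress argument as it does elsewhere in this section):
var importer = new LinguaLinksImport(Cache, null, null);
bool succeeded = (bool)importer.ImportInterlinear(new DummyProgressDlg(), new object[] { @"C:\temp\sample.flextext" });
FDO.IText firstImportedText = importer.FirstNewText; // set by the method above when the import succeeds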