/// <summary>
/// Verifies that AddTo copies every member of one reference collection into another.
/// </summary>
public void AddTo_RefColToRefCol()
{
	IFdoServiceLocator locator = Cache.ServiceLocator;
	ILangProject project = Cache.LanguageProject;
	ILexDb lexicon = project.LexDbOA;

	// Two appendixes owned by the lexical database.
	ILexAppendix appendixA = locator.GetInstance<ILexAppendixFactory>().Create();
	lexicon.AppendixesOC.Add(appendixA);
	ILexAppendix appendixB = locator.GetInstance<ILexAppendixFactory>().Create();
	lexicon.AppendixesOC.Add(appendixB);

	// One entry carrying two senses.
	ILexEntry entry = locator.GetInstance<ILexEntryFactory>().Create();
	ILexSense firstSense = locator.GetInstance<ILexSenseFactory>().Create();
	entry.SensesOS.Add(firstSense);
	ILexSense secondSense = locator.GetInstance<ILexSenseFactory>().Create();
	entry.SensesOS.Add(secondSense);

	// Populate the first sense's collection, then copy it wholesale to the second.
	firstSense.AppendixesRC.Add(appendixA);
	firstSense.AppendixesRC.Add(appendixB);
	firstSense.AppendixesRC.AddTo(secondSense.AppendixesRC);

	// The target collection must now contain both appendixes.
	Assert.AreEqual(2, secondSense.AppendixesRC.Count);
	Assert.IsTrue(secondSense.AppendixesRC.Contains(appendixA));
	Assert.IsTrue(secondSense.AppendixesRC.Contains(appendixB));
}
/// <summary>
/// Verifies that Count tracks Add/Remove correctly on both a reference
/// collection and a reference sequence property.
/// </summary>
public void CountPropertyTests()
{
	IFdoServiceLocator locator = Cache.ServiceLocator;
	ILangProject project = Cache.LanguageProject;
	ILexDb lexicon = project.LexDbOA;
	ILexEntry entry = locator.GetInstance<ILexEntryFactory>().Create();
	ILexSense sense = locator.GetInstance<ILexSenseFactory>().Create();
	entry.SensesOS.Add(sense);

	// FdoReferenceCollection: count goes up by one on Add, back down on Remove.
	int countBefore = lexicon.LexicalFormIndexRC.Count;
	lexicon.LexicalFormIndexRC.Add(entry);
	Assert.AreEqual(countBefore + 1, lexicon.LexicalFormIndexRC.Count);
	lexicon.LexicalFormIndexRC.Remove(entry);
	Assert.AreEqual(countBefore, lexicon.LexicalFormIndexRC.Count);

	// FdoReferenceSequence: same round trip using Add and RemoveAt on the tail.
	countBefore = entry.MainEntriesOrSensesRS.Count;
	entry.MainEntriesOrSensesRS.Add(sense);
	Assert.AreEqual(countBefore + 1, entry.MainEntriesOrSensesRS.Count);
	entry.MainEntriesOrSensesRS.RemoveAt(entry.MainEntriesOrSensesRS.Count - 1);
	Assert.AreEqual(countBefore, entry.MainEntriesOrSensesRS.Count);
}
/// <summary>
/// Verifies that editing a nested (minor) object bumps the DateModified of the
/// lexical entry that ultimately owns it.
/// </summary>
public void ModifyMinorObject()
{
	CheckDisposed();
	ILexDb lexicon = Cache.LangProject.LexDbOA;
	ILexEntry entry = (ILexEntry)lexicon.EntriesOC.Add(new LexEntry());
	ILexSense sense = (ILexSense)entry.SensesOS.Append(new LexSense());
	// Don't try this...LexExampleSentence has an initialize method that needs the database.
	// LexExampleSentence es = new LexExampleSentence();
	// ls.ExamplesOS.Append(es);
	ILexSense subsense = (ILexSense)sense.SensesOS.Append(new LexSense());
	DateTime backdated = entry.DateModified.Subtract(TimeSpan.FromMinutes(2.0));
	// Backdate the entry; disable the monitor so the set itself is not treated as a change.
	m_cmtManager.Disabled = true;
	entry.DateModified = backdated;
	m_cmtManager.Disabled = false;
	// Touching a string property two levels down should refresh the entry's timestamp.
	subsense.Gloss.VernacularDefaultWritingSystem = "abc";
	Assert.IsTrue(DateTime.Compare(entry.DateModified, backdated) > 0,
		"modify time increased setting string prop of 2-level child");
	// Backdate again for the object-property case.
	m_cmtManager.Disabled = true;
	entry.DateModified = backdated;
	m_cmtManager.Disabled = false;
	m_cmtManager.PropChanged(entry.Hvo, (int)LexDb.LexDbTags.kflidEntries, 0, 0, 0);
	// Appending a sense two levels down should also refresh the entry's timestamp.
	ILexSense deepSense = (ILexSense)subsense.SensesOS.Append(new LexSense());
	Assert.IsTrue(DateTime.Compare(entry.DateModified, backdated) > 0,
		"modify time increased setting object prop of 2-level child");
}
/// <summary>
/// Populates the lexical database's ExtendedNoteTypes possibility list with the
/// five standard note types (Collocation, Cultural, Discourse, Grammar,
/// Semantic), each created with its fixed GUID and a zero-based ordinal, and
/// named/abbreviated in the user writing system.
/// </summary>
/// <param name="lexDb">The lexical database whose ExtendedNoteTypesOA list is filled.</param>
private static void AddExtendedNoteTypes(ILexDb lexDb)
{
	var cache = lexDb.Cache;
	var servLoc = cache.ServiceLocator;
	var dataReader = (IDataReader)servLoc.GetInstance<IDataSetup>();
	var tsf = cache.TsStrFactory;
	var eng = servLoc.WritingSystemManager.UserWritingSystem;
	var typesList = lexDb.ExtendedNoteTypesOA;
	// Direct cast instead of 'as': fail fast with InvalidCastException rather than
	// a later NullReferenceException if the factory lacks the internal interface.
	var possibilityFactory = (ICmPossibilityFactoryInternal)servLoc.GetInstance<ICmPossibilityFactory>();
	// Data-driven table replaces the original loop-index switch: the fixed GUIDs,
	// names and abbreviations in list order (the ord is the array index).
	var noteTypes = new[]
	{
		new { Guid = new Guid("2f06d436-b1e0-47ae-a42e-1f7b893c5fc2"), Name = "Collocation", Abbr = "Coll." },
		new { Guid = new Guid("7ad06e7d-15d1-42b0-ae19-9c05b7c0b181"), Name = "Cultural", Abbr = "Cult." },
		new { Guid = new Guid("d3d28628-60c9-4917-8185-ba64c59f20c3"), Name = "Discourse", Abbr = "Disc." },
		new { Guid = new Guid("30115b33-608a-4506-9f9c-2457cab4f4a8"), Name = "Grammar", Abbr = "Gram." },
		new { Guid = new Guid("5dd29371-fdb0-497a-a2fb-7ca69b00ad4f"), Name = "Semantic", Abbr = "Sem." }
	};
	for (var i = 0; i < noteTypes.Length; i++)
	{
		var noteType = noteTypes[i];
		// Create the ExtendedNoteType at the next real HVO with a zero-based ord.
		var poss = possibilityFactory.Create(noteType.Guid, dataReader.GetNextRealHvo(), typesList, i);
		poss.Name.set_String(eng.Handle, tsf.MakeString(noteType.Name, eng.Handle));
		poss.Abbreviation.set_String(eng.Handle, tsf.MakeString(noteType.Abbr, eng.Handle));
	}
}
/// ------------------------------------------------------------------------------------
/// <summary>
/// Sets up fixture data: initializes writing-system encodings and the lexical
/// database in the in-memory cache, then caches the lexical database and its
/// entries collection in member fields for use by the tests.
/// </summary>
/// ------------------------------------------------------------------------------------
protected override void CreateTestData()
{
	m_inMemoryCache.InitializeWritingSystemEncodings();
	m_inMemoryCache.InitializeLexDb();
	m_ldb = Cache.LangProject.LexDbOA;
	m_entriesCol = m_ldb.EntriesOC;
}
/// <summary>
/// Verifies that a parse result holding two analyses of "catsTEST"
/// (noun stem + plural affix, verb stem + agreement affix) produces
/// two analyses on the wordform.
/// </summary>
public void TwoAnalyses()
{
	IWfiWordform wordform = CheckAnalysisSize("catsTEST", 0, true);
	ILexDb lexicon = Cache.LanguageProject.LexDbOA;
	ParseResult parseResult = null;
	UndoableUnitOfWorkHelper.Do("Undo stuff", "Redo stuff", m_actionHandler, () =>
	{
		// Noun reading: cat stem plus plural inflectional affix.
		ILexEntry nounEntry = m_entryFactory.Create();
		IMoStemAllomorph nounForm = m_stemAlloFactory.Create();
		nounEntry.AlternateFormsOS.Add(nounForm);
		nounForm.Form.VernacularDefaultWritingSystem = Cache.TsStrFactory.MakeString("catNTEST", m_vernacularWS.Handle);
		IMoStemMsa nounMsa = m_stemMsaFactory.Create();
		nounEntry.MorphoSyntaxAnalysesOC.Add(nounMsa);
		ILexEntry pluralEntry = m_entryFactory.Create();
		IMoAffixAllomorph pluralForm = m_afxAlloFactory.Create();
		pluralEntry.AlternateFormsOS.Add(pluralForm);
		pluralForm.Form.VernacularDefaultWritingSystem = Cache.TsStrFactory.MakeString("sPLTEST", m_vernacularWS.Handle);
		IMoInflAffMsa pluralMsa = m_inflAffMsaFactory.Create();
		pluralEntry.MorphoSyntaxAnalysesOC.Add(pluralMsa);
		// Verb reading: cat stem plus agreement inflectional affix.
		ILexEntry verbEntry = m_entryFactory.Create();
		IMoStemAllomorph verbForm = m_stemAlloFactory.Create();
		verbEntry.AlternateFormsOS.Add(verbForm);
		verbForm.Form.VernacularDefaultWritingSystem = Cache.TsStrFactory.MakeString("catVTEST", m_vernacularWS.Handle);
		IMoStemMsa verbMsa = m_stemMsaFactory.Create();
		verbEntry.MorphoSyntaxAnalysesOC.Add(verbMsa);
		ILexEntry agrEntry = m_entryFactory.Create();
		IMoAffixAllomorph agrForm = m_afxAlloFactory.Create();
		agrEntry.AlternateFormsOS.Add(agrForm);
		agrForm.Form.VernacularDefaultWritingSystem = Cache.TsStrFactory.MakeString("sAGRTEST", m_vernacularWS.Handle);
		IMoInflAffMsa agrMsa = m_inflAffMsaFactory.Create();
		agrEntry.MorphoSyntaxAnalysesOC.Add(agrMsa);
		parseResult = new ParseResult(new[]
		{
			new ParseAnalysis(new[]
			{
				new ParseMorph(nounForm, nounMsa),
				new ParseMorph(pluralForm, pluralMsa)
			}),
			new ParseAnalysis(new[]
			{
				new ParseMorph(verbForm, verbMsa),
				new ParseMorph(agrForm, agrMsa)
			})
		});
	});
	m_filer.ProcessParse(wordform, ParserPriority.Low, parseResult);
	ExecuteIdleQueue();
	CheckAnalysisSize("catsTEST", 2, false);
}
/// <summary>
/// Tests ObjectListPublisher: caches a fake vector property on an arbitrary HVO,
/// reads sizes/items through both the overridden (fake) flid and base (real)
/// properties, then inserts and deletes items, verifying the PropChanged
/// notification each mutation fires.
/// </summary>
public void SetAndAccessDummyList()
{
	ILexDb lexDb = Cache.LangProject.LexDbOA;
	ILexEntry entry1 = null;
	ICmResource res1 = null;
	// Real objects are created inside a non-undoable UOW; res1 backs the "base vec" checks.
	NonUndoableUnitOfWorkHelper.Do(m_actionHandler, () =>
	{
		var leFactory = Cache.ServiceLocator.GetInstance<ILexEntryFactory>();
		entry1 = leFactory.Create();
		ILexEntry entry2 = leFactory.Create();
		res1 = Cache.ServiceLocator.GetInstance<ICmResourceFactory>().Create();
		lexDb.ResourcesOC.Add(res1);
	});
	// Arbitrary HVO that is not a real object; the publisher overlays a fake property on it.
	int hvoRoot = 10578;
	ObjectListPublisher publisher = new ObjectListPublisher(Cache.MainCacheAccessor as ISilDataAccessManaged, ObjectListFlid);
	var values = new int[] { 23, 56, 2048 };
	Notifiee recorder = new Notifiee();
	publisher.AddNotification(recorder);
	publisher.CacheVecProp(hvoRoot, values);
	// Overridden property reflects the cached values; base properties still work.
	Assert.AreEqual(values.Length, publisher.get_VecSize(hvoRoot, ObjectListFlid), "override of vec size");
	//Assert.AreEqual(Cache.LangProject.Texts.Count, publisher.get_VecSize(Cache.LangProject.Hvo, LangProjectTags.kflidTexts), "base vec size");
	Assert.AreEqual(23, publisher.get_VecItem(hvoRoot, ObjectListFlid, 0), "override of vec item");
	Assert.AreEqual(res1.Hvo, publisher.get_VecItem(lexDb.Hvo, LexDbTags.kflidResources, 0), "base vec item");
	Assert.AreEqual(56, publisher.get_VecItem(hvoRoot, ObjectListFlid, 1), "override of vec item, non-zero index");
	VerifyCurrentValue(hvoRoot, publisher, values, "original value");
	Assert.AreEqual(lexDb.ResourcesOC.Count(), publisher.VecProp(lexDb.Hvo, LexDbTags.kflidResources).Length, "base VecProp");
	// Caching the vector should have broadcast one full-replacement PropChanged.
	recorder.CheckChanges(new ChangeInformationTest[] { new ChangeInformationTest(hvoRoot, ObjectListFlid, 0, values.Length, 0) },
		"expected PropChanged from caching HVOs");
	publisher.RemoveNotification(recorder);
	// Insert two items at index 1 and check the notification describes an insertion of 2.
	recorder = new Notifiee();
	publisher.AddNotification(recorder);
	publisher.Replace(hvoRoot, 1, new int[] { 97, 98 }, 0);
	VerifyCurrentValue(hvoRoot, publisher, new int[] { 23, 97, 98, 56, 2048 }, "after inserting 97, 98");
	recorder.CheckChanges(new ChangeInformationTest[] { new ChangeInformationTest(hvoRoot, ObjectListFlid, 1, 2, 0) },
		"expected PropChanged from caching HVOs");
	publisher.RemoveNotification(recorder);
	// Delete the two inserted items and check the notification describes a deletion of 2.
	recorder = new Notifiee();
	publisher.AddNotification(recorder);
	publisher.Replace(hvoRoot, 1, new int[0], 2);
	VerifyCurrentValue(hvoRoot, publisher, new int[] { 23, 56, 2048 }, "after deleting 97, 98");
	recorder.CheckChanges(new ChangeInformationTest[] { new ChangeInformationTest(hvoRoot, ObjectListFlid, 1, 0, 2) },
		"expected PropChanged from caching HVOs");
	publisher.RemoveNotification(recorder);
}
/// <summary>
/// Verifies Contains on a reference collection: false before Add, true after.
/// </summary>
public void ContainsMethodTests()
{
	IFdoServiceLocator locator = Cache.ServiceLocator;
	ILangProject project = Cache.LanguageProject;
	ILexDb lexicon = project.LexDbOA;
	ILexEntry entry = locator.GetInstance<ILexEntryFactory>().Create();

	// FdoReferenceCollection round trip.
	Assert.IsFalse(lexicon.LexicalFormIndexRC.Contains(entry));
	lexicon.LexicalFormIndexRC.Add(entry);
	Assert.IsTrue(lexicon.LexicalFormIndexRC.Contains(entry));
	lexicon.LexicalFormIndexRC.Remove(entry);
}
/// <summary>
/// Constructs the accessor over the given lexical database and, when requested,
/// loads the source styles document and applies its styles to the project.
/// </summary>
public FlexStylesXmlAccessor(ILexDb lexicon, bool loadDocument = false, string sourceDocument = null)
	: base(lexicon.Cache)
{
	m_sourceDocumentPath = sourceDocument;
	m_lexicon = lexicon;
	if (!loadDocument)
		return;
	m_sourceStyles = LoadDoc(sourceDocument);
	if (string.IsNullOrEmpty(sourceDocument))
		return;
	CreateStyles(new Common.FwUtils.ConsoleProgress(),
		new object[] { m_cache.LangProject.StylesOC, m_sourceStyles, false });
}
/// <summary>
/// Tests that the parser filer creates one analysis on each of two wordforms
/// from a single parser XML result containing both.
/// </summary>
public void TwoWordforms()
{
	int hvoBearTEST = CheckAnalysisSize("bearTEST", 0, true);
	int hvoBullTEST = CheckAnalysisSize("bullTEST", 0, true);
	string xmlFragment = "";
	using (FdoCache cache = FdoCache.Create("TestLangProj"))
	{
		ILexDb ldb = cache.LangProject.LexDbOA;
		// Bear
		ILexEntry bearN = (ILexEntry)ldb.EntriesOC.Add(new LexEntry());
		AddIdToList(bearN.Hvo);
		IMoStemAllomorph bearNForm = (IMoStemAllomorph)bearN.AlternateFormsOS.Append(new MoStemAllomorph());
		bearNForm.Form.VernacularDefaultWritingSystem = "bearNTEST";
		IMoStemMsa bearNMSA = (IMoStemMsa)bearN.MorphoSyntaxAnalysesOC.Add(new MoStemMsa());
		// Bull
		ILexEntry bullN = (ILexEntry)ldb.EntriesOC.Add(new LexEntry());
		AddIdToList(bullN.Hvo);
		IMoStemAllomorph bullNForm = (IMoStemAllomorph)bullN.AlternateFormsOS.Append(new MoStemAllomorph());
		bullNForm.Form.VernacularDefaultWritingSystem = "bullNTEST";
		IMoStemMsa bullNMSA = (IMoStemMsa)bullN.MorphoSyntaxAnalysesOC.Add(new MoStemMsa());
		// One <Wordform> element per wordform, each carrying a single analysis.
		xmlFragment =
			"<Wordform DbRef='" + hvoBearTEST.ToString() + "' Form='bearTEST'>\n" +
			"<WfiAnalysis>\n" +
			"<Morphs>\n" +
			"<Morph>\n" +
			"<MoForm DbRef='" + bearNForm.Hvo.ToString() + "' Label='bearNTEST'/>\n" +
			"<MSI DbRef='" + bearNMSA.Hvo.ToString() + "'/>\n" +
			"</Morph>\n" +
			"</Morphs>\n" +
			"</WfiAnalysis>\n" +
			"</Wordform>\n" +
			"<Wordform DbRef='" + hvoBullTEST.ToString() + "' Form='bullTEST'>\n" +
			"<WfiAnalysis>\n" +
			"<Morphs>\n" +
			"<Morph>\n" +
			"<MoForm DbRef='" + bullNForm.Hvo.ToString() + "' Label='bullNTEST'/>\n" +
			"<MSI DbRef='" + bullNMSA.Hvo.ToString() + "'/>\n" +
			"</Morph>\n" +
			"</Morphs>\n" +
			"</WfiAnalysis>\n" +
			"</Wordform>\n";
	}
	m_filer.ProcessParse(MakeXML(xmlFragment, true));
	CheckAnalysisSize("bearTEST", 1, false);
	CheckAnalysisSize("bullTEST", 1, false);
}
/// <summary>
/// Verifies that a multilingualStringElement dumps one form element per
/// writing system that has an alternative set on the morph's form.
/// </summary>
public void MultilingualStringBasedonStringDictionary()
{
	FdoReferenceSequence<ILgWritingSystem> vernWss = Cache.LangProject.CurVernWssRS;
	ILexDb lexicon = Cache.LangProject.LexDbOA;
	ILexEntry entry = MakeLexEntry(lexicon, "xyzTest1", "xyzDefn1.1", 0);
	IMoForm morph = MoForm.MakeMorph(Cache, entry, "-is");
	// Add an alternative in the second vernacular WS so two forms get dumped.
	morph.Form.SetAlternative("iz", vernWss[1].Hvo);
	string template = "<entry><objAtomic objProperty='LexemeFormOA'/></entry>";
	string classSpec = "<class name='MoForm'><multilingualStringElement name='form' simpleProperty='NamesWithMarkers'/></class>";
	string output = GetResultStringFromEntry(entry, template, classSpec);
	Assert.AreEqual("<entry><form ws=\"fr\">-is</form>\r\n<form ws=\"ur\">-iz</form>\r\n</entry>", output.Trim());
}
/// <summary>
/// Verifies that setting a string property directly on a major object
/// (a lexical entry) bumps its DateModified.
/// </summary>
public void ModifyMajorObject()
{
	CheckDisposed();
	ILexDb lexicon = Cache.LangProject.LexDbOA;
	ILexEntry entry = (ILexEntry)lexicon.EntriesOC.Add(new LexEntry());
	DateTime backdated = entry.DateModified.Subtract(TimeSpan.FromMinutes(2.0));
	// Backdate the entry; disable the monitor so the set itself is not treated as a change.
	m_cmtManager.Disabled = true;
	entry.DateModified = backdated;
	m_cmtManager.Disabled = false;
	entry.CitationForm.VernacularDefaultWritingSystem = "abc";
	Assert.IsTrue(DateTime.Compare(entry.DateModified, backdated) > 0,
		"modify time increased setting string prop");
}
/// <summary>
/// Executes in two distinct scenarios.
///
/// 1. If disposing is true, the method has been called directly
/// or indirectly by a user's code via the Dispose method.
/// Both managed and unmanaged resources can be disposed.
///
/// 2. If disposing is false, the method has been called by the
/// runtime from inside the finalizer and you should not reference (access)
/// other managed objects, as they already have been garbage collected.
/// Only unmanaged resources can be disposed.
/// </summary>
/// <param name="disposing"></param>
/// <remarks>
/// If any exceptions are thrown, that is fine.
/// If the method is being done in a finalizer, it will be ignored.
/// If it is thrown by client code calling Dispose,
/// it needs to be handled by fixing the bug.
///
/// If subclasses override this method, they should call the base implementation.
/// </remarks>
protected override void Dispose(bool disposing)
{
	//Debug.WriteLineIf(!disposing, "****************** " + GetType().Name + " 'disposing' is false. ******************");
	// Must not be run more than once.
	if (IsDisposed)
		return;

	if (disposing)
	{
		// Dispose managed resources here.
	}

	// Dispose unmanaged resources here, whether disposing is true or false.
	// Clear member references so this fixture no longer holds onto the objects.
	m_entriesCol = null;
	m_ldb = null;

	base.Dispose(disposing);
}
/// <summary>
/// Gives the lexical database a minimal publication setup: names the
/// publication types list "Publications" and adds a single protected
/// "Main Dictionary" type at ordinal 0.
/// </summary>
private static void SetMinimalPublicationType(ILexDb lexDb)
{
	var locator = lexDb.Cache.ServiceLocator;
	var wsManager = locator.WritingSystemManager;
	var userWsHandle = wsManager.UserWritingSystem.Handle;
	lexDb.PublicationTypesOA.Name.set_String(userWsHandle, "Publications");
	var factory = locator.GetInstance<ICmPossibilityFactory>() as ICmPossibilityFactoryInternal;
	var reader = (IDataReader)locator.GetInstance<IDataSetup>();
	var mainDictionary = factory.Create(
		Guid.NewGuid(), reader.GetNextRealHvo(), lexDb.PublicationTypesOA, 0);
	mainDictionary.Name.set_String(userWsHandle, "Main Dictionary");
	mainDictionary.Abbreviation.set_String(userWsHandle, "Main");
	mainDictionary.IsProtected = true;
}
/// ------------------------------------------------------------------------------------
/// <summary>
/// Creates a lexical entry with a citation form, one sense carrying a
/// definition, a stem MSA hooked to that sense, and optionally a semantic
/// domain on the sense.
/// </summary>
/// <param name="ld">The lexical database that owns the new entry</param>
/// <param name="cf">Citation form text (default vernacular writing system)</param>
/// <param name="defn">Definition text (default analysis writing system)</param>
/// <param name="hvoDomain">HVO of a semantic domain to add to the sense; 0 for none</param>
/// <returns>The newly created entry</returns>
/// ------------------------------------------------------------------------------------
protected ILexEntry MakeLexEntry(ILexDb ld, string cf, string defn, int hvoDomain)
{
	ILexEntry le = ld.EntriesOC.Add(new LexEntry());
	le.CitationForm.VernacularDefaultWritingSystem = cf;
	ILexSense ls = le.SensesOS.Append(new LexSense());
	ls.Definition.AnalysisDefaultWritingSystem.Text = defn;
	if (hvoDomain != 0)
	{
		ls.SemanticDomainsRC.Add(hvoDomain);
	}
	// Give the sense a (stem) morphosyntactic analysis owned by the entry.
	MoMorphSynAnalysis msa = new MoStemMsa();
	le.MorphoSyntaxAnalysesOC.Add(msa);
	ls.MorphoSyntaxAnalysisRA = msa;
	return (le);
}
/// <summary>
/// Verifies that a single parse result whose analyses cover two different
/// wordforms gives each wordform one analysis.
/// </summary>
public void TwoWordforms()
{
	IWfiWordform snakeWordform = CheckAnalysisSize("snakeTEST", 0, true);
	IWfiWordform bullWordform = CheckAnalysisSize("bullTEST", 0, true);
	ILexDb lexicon = Cache.LanguageProject.LexDbOA;
	ParseResult parseResult = null;
	UndoableUnitOfWorkHelper.Do("Undo stuff", "Redo stuff", m_actionHandler, () =>
	{
		// Snake: one stem entry with an allomorph and a stem MSA.
		ILexEntry snakeEntry = m_entryFactory.Create();
		IMoStemAllomorph snakeForm = m_stemAlloFactory.Create();
		snakeEntry.AlternateFormsOS.Add(snakeForm);
		snakeForm.Form.VernacularDefaultWritingSystem = Cache.TsStrFactory.MakeString("snakeNTEST", m_vernacularWS.Handle);
		IMoStemMsa snakeMsa = m_stemMsaFactory.Create();
		snakeEntry.MorphoSyntaxAnalysesOC.Add(snakeMsa);
		// Bull: likewise.
		ILexEntry bullEntry = m_entryFactory.Create();
		IMoStemAllomorph bullForm = m_stemAlloFactory.Create();
		bullEntry.AlternateFormsOS.Add(bullForm);
		bullForm.Form.VernacularDefaultWritingSystem = Cache.TsStrFactory.MakeString("bullNTEST", m_vernacularWS.Handle);
		IMoStemMsa bullMsa = m_stemMsaFactory.Create();
		bullEntry.MorphoSyntaxAnalysesOC.Add(bullMsa);
		parseResult = new ParseResult(new[]
		{
			new ParseAnalysis(new[] { new ParseMorph(snakeForm, snakeMsa) }),
			new ParseAnalysis(new[] { new ParseMorph(bullForm, bullMsa) })
		});
	});
	m_filer.ProcessParse(snakeWordform, ParserPriority.Low, parseResult);
	ExecuteIdleQueue();
	CheckAnalysisSize("snakeTEST", 1, false);
	CheckAnalysisSize("bullTEST", 1, false);
}
/// <summary>
/// Verifies bulk Add of HVOs into a reference collection, and that adding a
/// duplicate member leaves the collection's size unchanged (set semantics).
/// </summary>
public void FdoReferenceCollection()
{
	CheckDisposed();
	ILexDb lexicon = Cache.LangProject.LexDbOA;
	lexicon.EntriesOC.Add(new LexEntry());
	lexicon.EntriesOC.Add(new LexEntry());
	// Gather up all entries in the DB.
	int entryVectorSize = Cache.GetVectorSize(lexicon.Hvo, (int)LexDb.LexDbTags.kflidEntries);
	FdoOwningCollection<ILexEntry> owningEntries = lexicon.EntriesOC;
	int[] owningHvos = owningEntries.HvoArray;
	// The owning collection must report the same size as the raw vector.
	Assert.AreEqual(entryVectorSize, owningHvos.Length, "Mis-matched number of entries.");
	FdoReferenceCollection<ILexEntry> refEntries = lexicon.LexicalFormIndexRC;
	int[] refHvosBefore = refEntries.HvoArray;
	int refCountBefore = refHvosBefore.Length;
	// Bulk-add every entry to the reference collection.
	refEntries.Add(owningHvos);
	// Verify they are all present now.
	FdoReferenceCollection<ILexEntry> refEntriesAfter = lexicon.LexicalFormIndexRC;
	int[] refHvosAfter = refEntriesAfter.HvoArray;
	Assert.AreEqual(owningHvos.Length + refCountBefore, refHvosAfter.Length,
		"Mis-matched number of entries in reference collection.");
	// Note: One could check the IDs, but it probably isn't needed,
	// as long as nobody else was messing with the database at the same time.
	// Re-adding an existing item must not grow the collection.
	refEntriesAfter.Add(owningHvos[0]);
	Assert.AreEqual(refHvosAfter.Length, refEntriesAfter.HvoArray.Length,
		"Mis-matched number of entries in reference collection.");
}
/// <summary>
/// Verifies that removing one member (by HVO) from a reference collection
/// deletes exactly that member.
/// </summary>
public void VectorRemove_ReferenceCollection()
{
	CheckDisposed();
	ILexDb lexicon = Cache.LangProject.LexDbOA;
	ILexEntry entry = lexicon.EntriesOC.Add(new LexEntry());
	ILexSense sense = entry.SensesOS.Append(new LexSense());
	// Create three usage-type possibilities to reference.
	FdoOwningSequence<ICmPossibility> usageSeq = lexicon.UsageTypesOA.PossibilitiesOS;
	usageSeq.Append(new CmPossibility());
	usageSeq.Append(new CmPossibility());
	usageSeq.Append(new CmPossibility());
	int[] usageHvos = new int[] { usageSeq[0].Hvo, usageSeq[1].Hvo, usageSeq[2].Hvo };
	sense.UsageTypesRC.Add(usageHvos);
	// Removing the middle one should leave the other two.
	sense.UsageTypesRC.Remove(usageSeq[1].Hvo);
	Assert.AreEqual(2, sense.UsageTypesRC.Count);
}
/// <summary>
/// Tests that a parser result containing two analyses of "bearsTEST"
/// (bear+PL noun reading and bear+AGR verb reading) yields two analyses
/// on the wordform.
/// </summary>
public void TwoAnalyses()
{
	int hvoBearTEST = CheckAnalysisSize("bearsTEST", 0, true);
	string xmlFragment = "";
	using (FdoCache cache = FdoCache.Create("TestLangProj"))
	{
		ILexDb ldb = cache.LangProject.LexDbOA;
		// Noun
		ILexEntry bearN = (ILexEntry)ldb.EntriesOC.Add(new LexEntry());
		AddIdToList(bearN.Hvo);
		IMoStemAllomorph bearNForm = (IMoStemAllomorph)bearN.AlternateFormsOS.Append(new MoStemAllomorph());
		bearNForm.Form.VernacularDefaultWritingSystem = "bearNTEST";
		IMoStemMsa bearNMSA = (IMoStemMsa)bearN.MorphoSyntaxAnalysesOC.Add(new MoStemMsa());
		ILexEntry sPL = (ILexEntry)ldb.EntriesOC.Add(new LexEntry());
		AddIdToList(sPL.Hvo);
		IMoAffixAllomorph sPLForm = (IMoAffixAllomorph)sPL.AlternateFormsOS.Append(new MoAffixAllomorph());
		sPLForm.Form.VernacularDefaultWritingSystem = "sPLTEST";
		IMoInflAffMsa sPLMSA = (IMoInflAffMsa)sPL.MorphoSyntaxAnalysesOC.Add(new MoInflAffMsa());
		// Verb
		ILexEntry bearV = (ILexEntry)ldb.EntriesOC.Add(new LexEntry());
		AddIdToList(bearV.Hvo);
		IMoStemAllomorph bearVForm = (IMoStemAllomorph)bearV.AlternateFormsOS.Append(new MoStemAllomorph());
		bearVForm.Form.VernacularDefaultWritingSystem = "bearVTEST";
		IMoStemMsa bearVMSA = (IMoStemMsa)bearV.MorphoSyntaxAnalysesOC.Add(new MoStemMsa());
		ILexEntry sAGR = (ILexEntry)ldb.EntriesOC.Add(new LexEntry());
		AddIdToList(sAGR.Hvo);
		IMoAffixAllomorph sAGRForm = (IMoAffixAllomorph)sAGR.AlternateFormsOS.Append(new MoAffixAllomorph());
		sAGRForm.Form.VernacularDefaultWritingSystem = "sAGRTEST";
		IMoInflAffMsa sAGRMSA = (IMoInflAffMsa)sAGR.MorphoSyntaxAnalysesOC.Add(new MoInflAffMsa());
		// One wordform with two <WfiAnalysis> elements: noun+PL and verb+AGR.
		xmlFragment =
			"<Wordform DbRef='" + hvoBearTEST.ToString() + "' Form='bearsTEST'>\n" +
			"<WfiAnalysis>\n" +
			"<Morphs>\n" +
			"<Morph>\n" +
			"<MoForm DbRef='" + bearNForm.Hvo.ToString() + "' Label='bearNTEST'/>\n" +
			"<MSI DbRef='" + bearNMSA.Hvo.ToString() + "'/>\n" +
			"</Morph>\n" +
			"<Morph>\n" +
			"<MoForm DbRef='" + sPLForm.Hvo.ToString() + "' Label='sPLTEST'/>\n" +
			"<MSI DbRef='" + sPLMSA.Hvo.ToString() + "'/>\n" +
			"</Morph>\n" +
			"</Morphs>\n" +
			"</WfiAnalysis>\n" +
			"<WfiAnalysis>\n" +
			"<Morphs>\n" +
			"<Morph>\n" +
			"<MoForm DbRef='" + bearVForm.Hvo.ToString() + "' Label='bearVTEST'/>\n" +
			"<MSI DbRef='" + bearVMSA.Hvo.ToString() + "'/>\n" +
			"</Morph>\n" +
			"<Morph>\n" +
			"<MoForm DbRef='" + sAGRForm.Hvo.ToString() + "' Label='sAGRTEST'/>\n" +
			"<MSI DbRef='" + sAGRMSA.Hvo.ToString() + "'/>\n" +
			"</Morph>\n" +
			"</Morphs>\n" +
			"</WfiAnalysis>\n" +
			"</Wordform>\n";
	}
	m_filer.ProcessParse(MakeXML(xmlFragment, true));
	CheckAnalysisSize("bearsTEST", 2, false);
}
/// <summary>
/// Checks that when the parser returns an analysis duplicating two existing
/// human-created analyses, both duplicates receive a parser evaluation while a
/// distinct (sense-based) analysis of the same wordform is left unevaluated.
/// </summary>
public void DuplicateAnalysesApproval()
{
	int hvoBearTEST = CheckAnalysisSize("bearTEST", 0, true);
	string xmlFragment = "";
	int anal1Hvo;
	int anal2Hvo;
	int anal3Hvo;
	using (FdoCache cache = FdoCache.Create("TestLangProj"))
	{
		IWfiAnalysis anal = null;
		ILexDb ldb = cache.LangProject.LexDbOA;
		// Bear entry
		ILexEntry bearN = (ILexEntry)ldb.EntriesOC.Add(new LexEntry());
		AddIdToList(bearN.Hvo);
		IMoStemAllomorph bearNForm = (IMoStemAllomorph)bearN.AlternateFormsOS.Append(new MoStemAllomorph());
		bearNForm.Form.VernacularDefaultWritingSystem = "bearNTEST";
		IMoStemMsa bearNMSA = (IMoStemMsa)bearN.MorphoSyntaxAnalysesOC.Add(new MoStemMsa());
		// Stray empty statement (";;") removed here.
		ILexSense bearNLS = (ILexSense)bearN.SensesOS.Append(new LexSense());
		IWfiWordform wf = WfiWordform.CreateFromDBObject(cache, hvoBearTEST);
		// First of two duplicate analyses
		anal = (IWfiAnalysis)wf.AnalysesOC.Add(new WfiAnalysis());
		anal1Hvo = anal.Hvo;
		IWfiMorphBundle mb = (IWfiMorphBundle)anal.MorphBundlesOS.Append(new WfiMorphBundle());
		mb.MorphRA = bearNForm;
		mb.MsaRA = bearNMSA;
		CheckEvaluationSize(anal1Hvo, 0, true, "anal1Hvo");
		// Non-duplicate, to make sure it does not get approved.
		anal = (IWfiAnalysis)wf.AnalysesOC.Add(new WfiAnalysis());
		anal2Hvo = anal.Hvo;
		mb = (IWfiMorphBundle)anal.MorphBundlesOS.Append(new WfiMorphBundle());
		mb.SenseRA = bearNLS;
		CheckEvaluationSize(anal2Hvo, 0, true, "anal2Hvo");
		// Second of two duplicate analyses
		anal = (IWfiAnalysis)wf.AnalysesOC.Add(new WfiAnalysis());
		anal3Hvo = anal.Hvo;
		mb = (IWfiMorphBundle)anal.MorphBundlesOS.Append(new WfiMorphBundle());
		mb.MorphRA = bearNForm;
		mb.MsaRA = bearNMSA;
		CheckEvaluationSize(anal3Hvo, 0, true, "anal3Hvo");
		CheckAnalysisSize("bearTEST", 3, false);
		// Parser result matching the two duplicate analyses (bearN form + MSA).
		xmlFragment =
			"<Wordform DbRef='" + hvoBearTEST.ToString() + "' Form='bearTEST'>\n" +
			"<WfiAnalysis>\n" +
			"<Morphs>\n" +
			"<Morph>\n" +
			"<MoForm DbRef='" + bearNForm.Hvo.ToString() + "' Label='bearNTEST'/>\n" +
			"<MSI DbRef='" + bearNMSA.Hvo.ToString() + "'/>\n" +
			"</Morph>\n" +
			"</Morphs>\n" +
			"</WfiAnalysis>\n" +
			"</Wordform>\n";
	}
	m_filer.ProcessParse(MakeXML(xmlFragment, true));
	// Both duplicates should now carry one evaluation; the distinct one none.
	CheckEvaluationSize(anal1Hvo, 1, false, "anal1Hvo");
	CheckEvaluationSize(anal2Hvo, 0, false, "anal2Hvo");
	CheckEvaluationSize(anal3Hvo, 1, false, "anal3Hvo");
}
/// -------------------------------------------------------------------------------------
/// <summary>
/// Constructor. Wraps the given lexical database without loading any styles
/// document (see the overload taking loadDocument/sourceDocument for that).
/// </summary>
/// <param name="lexicon">The lexical database</param>
/// -------------------------------------------------------------------------------------
public FlexStylesXmlAccessor(ILexDb lexicon)
{
	m_lexicon = lexicon;
}
/// <summary>
/// Exercises incremental updating of the FXT parser-data XML dump. Makes a broad set of
/// model changes — stem/affix allomorphs, compound rules, ad hoc co-prohibitions,
/// productivity restrictions, phonological environments, parser parameters, lex entry
/// create/delete, inflectional templates, phonological features, a feature-based natural
/// class, phonological and morphological rules — then calls UpdateFXT() and asserts that
/// each change is reflected in the updated XML. A second round of rule modifications and
/// a sequence reorder is verified via a second UpdateFXT() call at the end.
/// NOTE(review): hard-coded indices (aiLexEntries[0]/[3], hvosPOSes[12], etc.) and
/// expected counts (61 LexEntries, 11 PhEnvironments, ...) assume the TestLangProj data
/// set — confirm against the fixture before editing.
/// </summary>
public void ParserDataChanges()
{
	XmlNode node;
#if !ShowDumpResult
	m_fxtResult.Save(Path.Combine(System.IO.Path.GetTempPath(), "TestFxtUpdateBefore.xml"));
#endif
	// -------------
	// Make data changes
	// -------------
	// Make a change to stem allomorph
	ILangProject lp = Cache.LangProject;
	ILexDb lexdb = lp.LexDbOA;
	int[] aiLexEntries = lexdb.EntriesOC.HvoArray;
	int hvoLexEntry = aiLexEntries[0];
	ILexEntry lexEntry = CmObject.CreateFromDBObject(Cache, hvoLexEntry) as ILexEntry;
	Assert.IsNotNull(lexEntry);
	IMoStemAllomorph stemAllomorph = lexEntry.LexemeFormOA as IMoStemAllomorph;
	Assert.IsNotNull(stemAllomorph);
	stemAllomorph.Form.SetAlternative("bili-changed", Cache.DefaultVernWs);
	int hvoStemAllomorph = stemAllomorph.Hvo;
	stemAllomorph.IsAbstract = true;
	// Delete an affix allomorph
	hvoLexEntry = aiLexEntries[3];
	lexEntry = CmObject.CreateFromDBObject(Cache, hvoLexEntry) as ILexEntry;
	Assert.IsNotNull(lexEntry);
	IMoAffixAllomorph affixAllomorph = lexEntry.AlternateFormsOS[1] as IMoAffixAllomorph;
	Assert.IsNotNull(affixAllomorph);
	int hvoAffixAllomorph = affixAllomorph.Hvo;
	lexEntry.AlternateFormsOS.RemoveAt(1);
	Cache.PropChanged(null, PropChangeType.kpctNotifyAll, hvoLexEntry, (int)LexEntry.LexEntryTags.kflidAlternateForms, 1, 0, 1);
	// Add a new affix allomorph
	IMoAffixAllomorph newAffixAllomorph = new MoAffixAllomorph();
	lexEntry.AlternateFormsOS.Append(newAffixAllomorph);
	newAffixAllomorph.Form.SetAlternative("him-new", Cache.DefaultVernWs);
	int hvoNewAffixAllomorph = newAffixAllomorph.Hvo;
	Cache.PropChanged(null, PropChangeType.kpctNotifyAll, hvoLexEntry, (int)LexEntry.LexEntryTags.kflidAlternateForms, lexEntry.AlternateFormsOS.Count - 1, 1, 0);
	// add a compound rule
	IMoMorphData morphData = lp.MorphologicalDataOA;
	IMoEndoCompound compRuleNew = new MoEndoCompound();
	morphData.CompoundRulesOS.Append(compRuleNew);
	string sCompRuleName = "new compound rule";
	compRuleNew.Name.AnalysisDefaultWritingSystem = sCompRuleName;
	compRuleNew.HeadLast = true;
	int hvoPOS = lp.PartsOfSpeechOA.PossibilitiesOS.FirstItem.Hvo;
	compRuleNew.LeftMsaOA.PartOfSpeechRAHvo = hvoPOS;
	compRuleNew.RightMsaOA.PartOfSpeechRAHvo = hvoPOS;
	compRuleNew.OverridingMsaOA.PartOfSpeechRAHvo = hvoPOS;
	// Change compound rule description
	const string ksCompRuleDescription = "new description";
	compRuleNew.Description.AnalysisDefaultWritingSystem.Text = ksCompRuleDescription;
	Cache.PropChanged(null, PropChangeType.kpctNotifyAll, morphData.Hvo, (int)MoMorphData.MoMorphDataTags.kflidCompoundRules, morphData.CompoundRulesOS.Count - 1, 1, 0);
	// delete a compound rule
	IMoExoCompound compRuleDeleted = morphData.CompoundRulesOS.FirstItem as IMoExoCompound;
	int hvoCompRuleDeletedLeftMsa = compRuleDeleted.LeftMsaOAHvo;
	int hvoCompRuleDeletedRightMsa = compRuleDeleted.RightMsaOAHvo;
	int hvoCompRuleDeletedToMsa = compRuleDeleted.ToMsaOAHvo;
	morphData.CompoundRulesOS.RemoveAt(0);
	// add an ad hoc co-prohibition
	IMoAlloAdhocProhib alloAdHoc = new MoAlloAdhocProhib();
	morphData.AdhocCoProhibitionsOC.Add(alloAdHoc);
	alloAdHoc.Adjacency = 2;
	alloAdHoc.FirstAllomorphRAHvo = hvoNewAffixAllomorph;
	alloAdHoc.RestOfAllosRS.Append(hvoNewAffixAllomorph);
	// change a "rest of allos" in extant ad hoc co-prohibition
	int[] hvosAdHocProhibs = morphData.AdhocCoProhibitionsOC.HvoArray;
	IMoAlloAdhocProhib alloAdHocOld = CmObject.CreateFromDBObject(Cache, hvosAdHocProhibs[9]) as IMoAlloAdhocProhib;
	IMoAffixAllomorph alloAdHicOldFirstRestOfAllos = alloAdHocOld.RestOfAllosRS.FirstItem as IMoAffixAllomorph;
	IMoAffixAllomorph affixAllomorph2 = lexEntry.AlternateFormsOS[0] as IMoAffixAllomorph;
	alloAdHocOld.RestOfAllosRS.Append(affixAllomorph2);
	alloAdHocOld.RestOfAllosRS.RemoveAt(0);
	alloAdHocOld.Adjacency = 2;
	//Add a new productivity restriction
	ICmPossibilityList prodRestricts = morphData.ProdRestrictOA;
	ICmPossibility prodRestriction = new CmPossibility();
	prodRestricts.PossibilitiesOS.Append(prodRestriction);
	string sNewProdRestrictName = "new exception feature";
	prodRestriction.Name.AnalysisDefaultWritingSystem = sNewProdRestrictName;
	Cache.PropChanged(null, PropChangeType.kpctNotifyAll, prodRestricts.Hvo, (int)CmPossibilityList.CmPossibilityListTags.kflidPossibilities, prodRestricts.PossibilitiesOS.Count - 1, 1, 0);
	// Change a phonological enviroment string representation
	IPhPhonData phonData = lp.PhonologicalDataOA;
	IPhEnvironment env = phonData.EnvironmentsOS.FirstItem;
	const string ksEnvStringRep = "/ _ [C] [V] a e i o u";
	env.StringRepresentation.Text = ksEnvStringRep;
	// Add a new phonological enviroment string representation
	IPhEnvironment envNew = new PhEnvironment();
	phonData.EnvironmentsOS.Append(envNew);
	envNew.StringRepresentation.Text = "/ _ m";
	int hvoPhonData = phonData.Hvo;
	Cache.PropChanged(null, PropChangeType.kpctNotifyAll, hvoPhonData, (int)PhPhonData.PhPhonDataTags.kflidEnvironments, phonData.EnvironmentsOS.Count - 1, 1, 0);
	// Change parser parameters (to test Unicode string field type)
	string sParserParameters = morphData.ParserParameters.Trim();
	int i = sParserParameters.IndexOf("</ParserParameters>");
	string sNewParserParameters = sParserParameters.Substring(0, i) + "<HermitCrab><stuff>1</stuff></HermitCrab>" + "</ParserParameters>";
	morphData.ParserParameters = sNewParserParameters;
	// Delete a lex entry
	int[] hvosEntries = lexdb.EntriesOC.HvoArray;
	int hvoEntryDeleted = hvosEntries[hvosEntries.Length - 4];
	ILexEntry entryDeleted = CmObject.CreateFromDBObject(Cache, hvoEntryDeleted) as ILexEntry;
	int hvoEntryDeletedLexemeForm = entryDeleted.LexemeFormOAHvo;
	int[] hvosEntryDeletedAlternateForms = entryDeleted.AlternateFormsOS.HvoArray;
	int[] hvosEntryDeletedMSAs = entryDeleted.MorphoSyntaxAnalysesOC.HvoArray;
	int[] hvosEntryDeletedSenses = entryDeleted.SensesOS.HvoArray;
	//entryDeleted.LexemeFormOA.DeleteUnderlyingObject();
	lexdb.EntriesOC.Remove(hvosEntries[hvosEntries.Length - 4]);
	//Cache.PropChanged(null, PropChangeType.kpctNotifyAll, morphData.Hvo, (int)MoMorphData.MoMorphDataTags.kflidParserParameters, 0, 0, 0);
	// Create a new lex entry
	ILexEntry entryNew = new LexEntry();
	lexdb.EntriesOC.Add(entryNew);
	IMoAffixAllomorph alloNew = new MoAffixAllomorph();
	entryNew.LexemeFormOA = alloNew;
	string sNewAlloForm = "dem";
	alloNew.Form.VernacularDefaultWritingSystem = sNewAlloForm;
	alloNew.MorphTypeRA = (IMoMorphType)lexdb.MorphTypesOA.LookupPossibilityByGuid(new Guid(MoMorphType.kguidMorphPrefix));
	IMoAffixAllomorph alloNew2 = new MoAffixAllomorph();
	entryNew.AlternateFormsOS.Append(alloNew2);
	string sNewAlloForm2 = "den";
	alloNew2.Form.VernacularDefaultWritingSystem = sNewAlloForm2;
	alloNew2.MorphTypeRA = (IMoMorphType)lexdb.MorphTypesOA.LookupPossibilityByGuid(new Guid(MoMorphType.kguidMorphPrefix));
	Cache.PropChanged(null, PropChangeType.kpctNotifyAll, entryNew.Hvo, (int)LexEntry.LexEntryTags.kflidAlternateForms, entryNew.AlternateFormsOS.Count - 1, 1, 0);
	ILexSense sense = new LexSense();
	entryNew.SensesOS.Append(sense);
	string sGloss = "MeToo";
	sense.Gloss.AnalysisDefaultWritingSystem = sGloss;
	IMoInflAffMsa inflAffixMsa = new MoInflAffMsa();
	entryNew.MorphoSyntaxAnalysesOC.Add(inflAffixMsa);
	sense.MorphoSyntaxAnalysisRA = inflAffixMsa;
	int[] hvosPOSes = lp.PartsOfSpeechOA.PossibilitiesOS.HvoArray;
	int hvoVerb = hvosPOSes[12];
	inflAffixMsa.PartOfSpeechRAHvo = hvoVerb;
	IPartOfSpeech pos = CmObject.CreateFromDBObject(Cache, hvoVerb) as IPartOfSpeech;
	int hvoSlot = pos.AffixSlotsOC.HvoArray[2];
	inflAffixMsa.SlotsRC.Add(hvoSlot);
	Cache.PropChanged(null, PropChangeType.kpctNotifyAll, entryNew.Hvo, (int)LexEntry.LexEntryTags.kflidSenses, entryNew.SensesOS.Count - 1, 1, 0);
	// Add an inflectional template
	int[] hvoVerbSubCats = pos.SubPossibilitiesOS.HvoArray;
	int hvoIntransVerb = hvoVerbSubCats[2];
	IPartOfSpeech posVI = CmObject.CreateFromDBObject(Cache, hvoIntransVerb) as IPartOfSpeech;
	IMoInflAffixTemplate affixTemplate = new MoInflAffixTemplate();
	posVI.AffixTemplatesOS.Append(affixTemplate);
	affixTemplate.Name.AnalysisDefaultWritingSystem = "derived verb";
	affixTemplate.Final = false;
	affixTemplate.SuffixSlotsRS.Append(hvoSlot);
	Cache.PropChanged(null, PropChangeType.kpctNotifyAll, posVI.Hvo, (int)PartOfSpeech.PartOfSpeechTags.kflidAffixTemplates, posVI.AffixTemplatesOS.Count - 1, 1, 0);
	// add a phonological feature
	IFsClosedFeature consFeat = new FsClosedFeature();
	Cache.LangProject.PhFeatureSystemOA.FeaturesOC.Add(consFeat);
	consFeat.Name.AnalysisDefaultWritingSystem = "consonantal";
	consFeat.Abbreviation.AnalysisDefaultWritingSystem = "cons";
	IFsSymFeatVal consPlus = new FsSymFeatVal();
	consFeat.ValuesOC.Add(consPlus);
	consPlus.SimpleInit("+", "positive");
	IFsSymFeatVal consMinus = new FsSymFeatVal();
	consFeat.ValuesOC.Add(consMinus);
	consMinus.SimpleInit("-", "negative");
	IFsFeatStrucType fsType = null;
	if (Cache.LangProject.PhFeatureSystemOA.TypesOC.Count == 0)
	{
		fsType = new FsFeatStrucType();
		Cache.LangProject.PhFeatureSystemOA.TypesOC.Add(fsType);
		fsType.Abbreviation.AnalysisDefaultWritingSystem = "Phon";
	}
	else
	{
		// grab the first (arbitrary) type in the unordered collection
		foreach (IFsFeatStrucType type in Cache.LangProject.PhFeatureSystemOA.TypesOC)
		{
			fsType = type;
			break;
		}
	}
	fsType.FeaturesRS.Append(consFeat);
	// add a feature-based NC
	IPhNCFeatures featNC = new PhNCFeatures();
	Cache.LangProject.PhonologicalDataOA.NaturalClassesOS.Append(featNC);
	featNC.Name.AnalysisDefaultWritingSystem = "Consonants (Features)";
	featNC.Abbreviation.AnalysisDefaultWritingSystem = "CF";
	IFsFeatStruc fs = new FsFeatStruc();
	featNC.FeaturesOA = fs;
	IFsClosedValue val = fs.FindOrCreateClosedValue(consFeat.Hvo);
	val.FeatureRA = consFeat;
	val.ValueRA = consPlus;
	featNC.NotifyNew();
	// add phonological rule
	IPhRegularRule regRule = new PhRegularRule();
	Cache.LangProject.PhonologicalDataOA.PhonRulesOS.Append(regRule);
	regRule.NotifyNew();
	regRule.Name.AnalysisDefaultWritingSystem = "regular rule";
	IPhSimpleContextSeg segCtxt = new PhSimpleContextSeg();
	regRule.RightHandSidesOS[0].StrucChangeOS.Append(segCtxt);
	IPhPhoneme phoneme = null;
	// grab the first (arbitrary) phoneme of the first phoneme set
	foreach (IPhPhoneme phon in Cache.LangProject.PhonologicalDataOA.PhonemeSetsOS[0].PhonemesOC)
	{
		phoneme = phon;
		break;
	}
	segCtxt.FeatureStructureRA = phoneme;
	segCtxt.NotifyNew();
	IPhSimpleContextNC ncCtxt = new PhSimpleContextNC();
	regRule.RightHandSidesOS[0].LeftContextOA = ncCtxt;
	ncCtxt.FeatureStructureRA = featNC;
	ncCtxt.NotifyNew();
	// add a morphological rule
	IMoAffixProcess affRule = new MoAffixProcess();
	entryNew.AlternateFormsOS.Append(affRule);
	affRule.NotifyNew();
	ncCtxt = new PhSimpleContextNC();
	affRule.InputOS.Append(ncCtxt);
	ncCtxt.FeatureStructureRA = featNC;
	ncCtxt.NotifyNew();
	IMoCopyFromInput copy = new MoCopyFromInput();
	affRule.OutputOS.Append(copy);
	copy.ContentRA = ncCtxt;
	copy.NotifyNew();
	// -----------
	// Update the FXT result
	// -----------
	XmlDocument updatedFxtResult = UpdateFXT();
	// -----------
	// Test the updated results
	// -----------
	// Test changed stem allomorph: checks on MultiUnicode and boolean
	node = updatedFxtResult.SelectSingleNode("//MoStemAllomorph[@Id='" + hvoStemAllomorph + "']");
	Assert.IsNotNull(node);
	Assert.AreEqual(stemAllomorph.Form.VernacularDefaultWritingSystem, node.InnerText, "stem allomorph form change failed");
	XmlNode contentNode = node.SelectSingleNode("@IsAbstract");
	Assert.AreEqual("1", contentNode.InnerText, "stem allomorph is abstract should be true (=1)");
	// Test deleted affix allomorph: checks on owning sequence
	node = updatedFxtResult.SelectSingleNode("//MoAffixAllomorph[@Id='" + hvoAffixAllomorph + "']");
	Assert.IsNull(node, "Deleted affix allomorph should be null");
	node = updatedFxtResult.SelectSingleNode("//LexEntry[@id='" + hvoLexEntry + "']/AlternateForms[@dst='" + hvoAffixAllomorph + "']");
	Assert.IsNull(node, "LexEntry should no longer have deleted alternate form");
	// Test added new affix allomorph: checks on owning sequence owned by an item with an @Id; also checks on addition of MoAffixAllomorph via AllAllomorphs
	string sXPath = "//LexEntry[@Id='" + hvoLexEntry + "']/AlternateForms[@dst='" + hvoNewAffixAllomorph + "']";
	node = updatedFxtResult.SelectSingleNode(sXPath);
	Assert.IsNotNull(node, "LexEntry should have added alternate form");
	node = updatedFxtResult.SelectSingleNode("//MoAffixAllomorph[@Id='" + hvoNewAffixAllomorph + "']");
	Assert.IsNotNull(node, "Added affix allomorph should be present");
	sXPath = "//LexEntry[@Id='" + hvoLexEntry + "']";
	node = updatedFxtResult.SelectSingleNode(sXPath);
	XmlNodeList nodes = node.SelectNodes("AlternateForms");
	Assert.AreEqual(3, nodes.Count, "Expected three Alternate forms in lex entry.");
	//Test newly added compound rule: checks on owning sequence owned by an Id-less element; also on multistring
	node = updatedFxtResult.SelectSingleNode("//MoEndoCompound[@Id='" + compRuleNew.Hvo + "']");
	Assert.IsNotNull(node, "did not find newly added compound rule");
	contentNode = node.SelectSingleNode("@HeadLast");
	Assert.IsNotNull(contentNode, "missing headlast attribute for coompound rule");
	Assert.AreEqual("1", contentNode.InnerText, "compound rule headlast value differs");
	contentNode = node.SelectSingleNode("Name");
	Assert.IsNotNull(contentNode, "missing Name for compound rule");
	Assert.AreEqual(sCompRuleName, contentNode.InnerText, "compound rule name differs");
	// check on MultiString
	contentNode = node.SelectSingleNode("Description");
	Assert.AreEqual(ksCompRuleDescription, contentNode.InnerText, "compound rule description differs");
	// check on count
	node = updatedFxtResult.SelectSingleNode("//CompoundRules");
	nodes = node.SelectNodes("MoExoCompound | MoEndoCompound");
	Assert.AreEqual(6, nodes.Count, "Expected seven compound rules.");
	// check on owningAtom
	node = updatedFxtResult.SelectSingleNode("//MoStemMsa[@Id='" + compRuleNew.LeftMsaOAHvo + "']");
	Assert.IsNotNull(node, "missing real MoStemMsa for LeftMsa of newly added compound rule");
	node = updatedFxtResult.SelectSingleNode("//MoStemMsa[@Id='" + compRuleNew.RightMsaOAHvo + "']");
	Assert.IsNotNull(node, "missing real MoStemMsa for RightMsa of newly added compound rule");
	node = updatedFxtResult.SelectSingleNode("//MoStemMsa[@Id='" + compRuleNew.OverridingMsaOAHvo + "']");
	Assert.IsNotNull(node, "missing real MoStemMsa for OverridingMsa of newly added compound rule");
	// Test deleted compound rule
	node = updatedFxtResult.SelectSingleNode("//MoExoCompound[@Id='" + compRuleDeleted.Hvo + "']");
	Assert.IsNull(node, "compound rule should be deleted");
	node = updatedFxtResult.SelectSingleNode("//MoStemMsa[@Id='" + hvoCompRuleDeletedLeftMsa + "']");
	Assert.IsNull(node, "compound rule left MSA should be deleted");
	node = updatedFxtResult.SelectSingleNode("//MoStemMsa[@Id='" + hvoCompRuleDeletedRightMsa + "']");
	Assert.IsNull(node, "compound rule right MSA should be deleted");
	node = updatedFxtResult.SelectSingleNode("//MoStemMsa[@Id='" + hvoCompRuleDeletedToMsa + "']");
	Assert.IsNull(node, "compound rule to MSA should be deleted");
	//Test newly added allomorph ad hoc rule: checks on owning collection
	node = updatedFxtResult.SelectSingleNode("//MoAlloAdhocProhib[@Id='" + alloAdHoc.Hvo + "']");
	Assert.IsNotNull(node, "did not find newly added allo ad hoc rule");
	contentNode = node.SelectSingleNode("@Adjacency");
	Assert.IsNotNull(contentNode, "missing adjacency attribute for allo ad hoc rule");
	Assert.AreEqual("2", contentNode.InnerText, "allo ad hoc rule adjacency value differs");
	contentNode = node.SelectSingleNode("FirstAllomorph");
	Assert.IsNotNull(contentNode, "missing FirstAllomorph for allo ad hoc rule");
	contentNode = contentNode.SelectSingleNode("@dst");
	Assert.IsNotNull(contentNode, "missing dst attribute of FirstAllomorph for allo ad hoc rule");
	Assert.AreEqual(hvoNewAffixAllomorph.ToString(), contentNode.InnerText, "FirstAllomorph of allo ad hoc rule differs");
	contentNode = node.SelectSingleNode("RestOfAllos");
	Assert.IsNotNull(contentNode, "missing RestOfAllos for allo ad hoc rule");
	contentNode = contentNode.SelectSingleNode("@dst");
	Assert.IsNotNull(contentNode, "missing dst attribute of RestOfAllos for allo ad hoc rule");
	Assert.AreEqual(hvoNewAffixAllomorph.ToString(), contentNode.InnerText, "RestOfAllos of allo ad hoc rule differs");
	// test change of a "rest of allos" in extant ad hoc co-prohibition: check on reference sequence
	node = updatedFxtResult.SelectSingleNode("//MoAlloAdhocProhib[@Id='" + alloAdHocOld.Hvo + "']");
	Assert.IsNotNull(node, "did not find old allo ad hoc rule");
	contentNode = node.SelectSingleNode("RestOfAllos");
	Assert.IsNotNull(contentNode, "missing RestOfAllos for old allo ad hoc rule");
	contentNode = contentNode.SelectSingleNode("@dst");
	Assert.IsNotNull(contentNode, "missing dst attribute of RestOfAllos for old allo ad hoc rule");
	Assert.AreEqual(affixAllomorph2.Hvo.ToString(), contentNode.InnerText, "RestOfAllos of old allo ad hoc rule differs");
	nodes = node.SelectNodes("RestOfAllos");
	Assert.AreEqual(1, nodes.Count, "count of RestOfAllos of old allo ad hoc rule differs");
	// check on integer change
	contentNode = node.SelectSingleNode("@Adjacency");
	Assert.AreEqual("2", contentNode.InnerText, "Adjacency differs");
	node = updatedFxtResult.SelectSingleNode("//MoAffixAllomorph[@Id='" + alloAdHicOldFirstRestOfAllos.Hvo + "']");
	Assert.IsNotNull(node, "Original RestOfAllos allomorph should still be present");
	nodes = updatedFxtResult.SelectNodes("//MoAffixAllomorph[@Id='" + affixAllomorph2.Hvo + "']");
	Assert.AreEqual(1, nodes.Count, "Should only be one instance of new allomorph in RestOfAllos");
	// Test added productivity restriction: check on CmPossibilityList
	node = updatedFxtResult.SelectSingleNode("//ProdRestrict/CmPossibility");
	Assert.IsNotNull(node, "Did not find newly added productivity restriction");
	node = node.SelectSingleNode("Name");
	Assert.IsNotNull(node, "Expected Name node in productivity restrictioni");
	Assert.AreEqual(sNewProdRestrictName, node.InnerText, "name of productivity restriction differs");
	// Test phonological environment string representation: check on string
	node = updatedFxtResult.SelectSingleNode("//PhEnvironment[@Id='" + env.Hvo + "']/@StringRepresentation");
	Assert.AreEqual(ksEnvStringRep, node.InnerText, "phonological environment string differs");
	// Test adding a phonological environment string representation:
	// check on case where parent of owner has Id and is class name;
	// also check on case where there is a comment/text node within the result nodes
	node = updatedFxtResult.SelectSingleNode("//PhEnvironment[@Id='" + envNew.Hvo + "']");
	Assert.IsNotNull(node, "missing newly added phonological environment");
	nodes = updatedFxtResult.SelectNodes("//PhEnvironment");
	Assert.AreEqual(11, nodes.Count, "number of PhEnvironments differs");
	// Test Parser Parameters: check on unicode string
	node = updatedFxtResult.SelectSingleNode("//ParserParameters");
	string sResultParseParameters = node.OuterXml.Trim();
	Assert.AreEqual(sNewParserParameters, sResultParseParameters, "Parser Parameters content differs");
	// Test deletion of a lex entry: check on finding LexDb when there is no class LexDb in FXT file
	nodes = updatedFxtResult.SelectNodes("//LexEntry");
	Assert.AreEqual(61, nodes.Count, "number of LexEntries differs");
	node = updatedFxtResult.SelectSingleNode("//LexEntry[@Id='" + hvoEntryDeleted + "']");
	Assert.IsNull(node, "Deleted lex entry should be missing");
	foreach (int hvo in hvosEntryDeletedAlternateForms)
	{
		node = updatedFxtResult.SelectSingleNode("//MoStemAllomorph[@Id='" + hvo + "'] | //MoAffixAllomorph[@Id='" + hvo + "']");
		Assert.IsNull(node, "deleted entry's alternate form should also be gone");
	}
	foreach (int hvo in hvosEntryDeletedMSAs)
	{
		node = updatedFxtResult.SelectSingleNode("//MoStemMsa[@Id='" + hvo + "']");
		Assert.IsNull(node, "deleted entry's msa should also be gone");
	}
	foreach (int hvo in hvosEntryDeletedSenses)
	{
		node = updatedFxtResult.SelectSingleNode("//LexSense[@Id='" + hvo + "']");
		Assert.IsNull(node, "deleted entry's lexsense should also be gone");
	}
	node = updatedFxtResult.SelectSingleNode("//MoStemAllomorph[@Id='" + hvoEntryDeletedLexemeForm + "']");
	Assert.IsNull(node, "deleted entry's lexeme form should also be gone");
	// Test adding new entry
	node = updatedFxtResult.SelectSingleNode("//LexEntry[@Id='" + entryNew.Hvo + "']");
	Assert.IsNotNull(node, "new lex entry is missing");
	contentNode = node.SelectSingleNode("LexemeForm[@dst='" + alloNew.Hvo + "']");
	Assert.IsNotNull(contentNode, "missing lexeme form for new entry");
	contentNode = node.SelectSingleNode("AlternateForms[@dst='" + alloNew2.Hvo + "']");
	Assert.IsNotNull(contentNode, "missing alternate form in new lex entry");
	contentNode = node.SelectSingleNode("Sense[@dst='" + sense.Hvo + "']");
	Assert.IsNotNull(contentNode, "missing sense in new lex entry");
	contentNode = node.SelectSingleNode("MorphoSyntaxAnalysis[@dst='" + inflAffixMsa.Hvo + "']");
	Assert.IsNotNull(contentNode, "missing msa in new lex entry");
	contentNode = node.SelectSingleNode("AlternateForms[@dst='" + affRule.Hvo + "']");
	Assert.IsNotNull(contentNode, "missing affix process rule in new lex entry");
	node = updatedFxtResult.SelectSingleNode("//MoAffixAllomorph[@Id='" + alloNew.Hvo + "']");
	Assert.IsNotNull(node, "new lexeme form affix allomorph for new lex entry is missing");
	contentNode = node.SelectSingleNode("@MorphType");
	Assert.IsNotNull(contentNode, "@MorphType missing for new MoAffixAllomorph in lexeme form of new lex entry");
	IMoMorphType typeNew = MoMorphType.CreateFromDBObject(Cache, Convert.ToInt32(contentNode.InnerText));
	string sGuidNew = typeNew.Guid.ToString();
	Assert.AreEqual(MoMorphType.kguidMorphPrefix, sGuidNew, "morph type wrong for new MoAffixAllomorph in lexeme form of new lex entry");
	contentNode = node.SelectSingleNode("Form");
	Assert.IsNotNull(contentNode, "Form missing for new MoAffixAllomorph in lexeme form new lex entry");
	Assert.AreEqual(sNewAlloForm, contentNode.InnerText, "form wrong for new MoAffixAllomorph in lexeme form of new lex entry");
	node = updatedFxtResult.SelectSingleNode("//MoAffixAllomorph[@Id='" + alloNew2.Hvo + "']");
	Assert.IsNotNull(node, "new alternate form affix allomorph for new lex entry is missing");
	contentNode = node.SelectSingleNode("@MorphType");
	Assert.IsNotNull(contentNode, "@MorphType missing for new MoAffixAllomorph in alternate form of new lex entry");
	typeNew = MoMorphType.CreateFromDBObject(Cache, Convert.ToInt32(contentNode.InnerText));
	sGuidNew = typeNew.Guid.ToString();
	Assert.AreEqual(MoMorphType.kguidMorphPrefix, sGuidNew, "morph type wrong for new MoAffixAllomorph in lexeme form of new lex entry");
	contentNode = node.SelectSingleNode("Form");
	Assert.IsNotNull(contentNode, "Form missing for new MoAffixAllomorph in alternate form new lex entry");
	Assert.AreEqual(sNewAlloForm2, contentNode.InnerText, "form wrong for new MoAffixAllomorph in alternate form of new lex entry");
	node = updatedFxtResult.SelectSingleNode("//LexSense[@Id='" + sense.Hvo + "']");
	Assert.IsNotNull(node, "new sense for new lex entry is missing");
	contentNode = node.SelectSingleNode("Gloss");
	Assert.IsNotNull(contentNode, "Gloss missing for new LexSense in new lex entry");
	Assert.AreEqual(sGloss, contentNode.InnerText, "Gloss wrong for new LexSense in new lex entry");
	node = updatedFxtResult.SelectSingleNode("//MoInflAffMsa[@Id='" + inflAffixMsa.Hvo + "']");
	Assert.IsNotNull(node, "new infl affix msa for new lex entry is missing");
	contentNode = node.SelectSingleNode("@PartOfSpeech");
	Assert.IsNotNull(contentNode, "@PartOfSpeech missing for new MoInflAffMsa in new lex entry");
	Assert.AreEqual(hvoVerb.ToString(), contentNode.InnerText, "part of speech wrong for new MoInflAffMsa in new lex entry");
	contentNode = node.SelectSingleNode("Slots/@dst");
	Assert.IsNotNull(contentNode, "Slots missing for new MoInflAffMsa in new lex entry");
	Assert.AreEqual(hvoSlot.ToString(), contentNode.InnerText, "slot wrong for new MoInflAffMsa in new lex entry");
	// Test adding new template
	node = updatedFxtResult.SelectSingleNode("//MoInflAffixTemplate[@Id='" + affixTemplate.Hvo + "']");
	Assert.IsNotNull(node, "new affix template missing");
	node = updatedFxtResult.SelectSingleNode("//PartOfSpeech[@Id='" + hvoIntransVerb + "']/AffixTemplates/MoInflAffixTemplate[@Id='" + affixTemplate.Hvo + "']");
	Assert.IsNotNull(node, "new affix template is in intransitive verb");
	// Test adding new phonological feature
	node = updatedFxtResult.SelectSingleNode("//PhFeatureSystem/Features/FsClosedFeature[@Id='" + consFeat.Hvo + "']");
	Assert.IsNotNull(node, "new phonological feature is missing");
	contentNode = node.SelectSingleNode("Abbreviation");
	Assert.IsNotNull(contentNode, "Abbreviation missing from new phonological feature");
	Assert.AreEqual(contentNode.InnerText, consFeat.Abbreviation.AnalysisDefaultWritingSystem, "Abbreviation wrong for new phonological feature");
	nodes = node.SelectNodes("Values/FsSymFeatVal");
	Assert.IsNotNull(nodes, "values missing from new phonological feature");
	Assert.AreEqual(nodes.Count, 2, "incorrect number of values in new phonological feature");
	node = updatedFxtResult.SelectSingleNode("//PhFeatureSystem/Types/FsFeatStrucType/Features/Feature[@dst='" + consFeat.Hvo + "']");
	Assert.IsNotNull(node, "reference to new phonological feature is missing from phonological feature system");
	// Test adding new feature-based NC
	node = updatedFxtResult.SelectSingleNode("//PhNCFeatures[@Id='" + featNC.Hvo + "']");
	Assert.IsNotNull(node, "new feature-based NC is missing");
	contentNode = node.SelectSingleNode("Abbreviation");
	Assert.IsNotNull(contentNode, "Abbreviation missing from new feature-based NC");
	Assert.AreEqual(contentNode.InnerText, featNC.Abbreviation.AnalysisDefaultWritingSystem, "Abbreviation wrong for new feature-based NC");
	contentNode = node.SelectSingleNode("FsFeatStruc/FsClosedValue[@Id='" + val.Hvo + "']");
	Assert.IsNotNull(contentNode, "value missing from new feature-based NC");
	Assert.AreEqual((contentNode as XmlElement).GetAttribute("Feature"), consFeat.Hvo.ToString(), "closed value feature is wrong in new feature-based NC");
	Assert.AreEqual((contentNode as XmlElement).GetAttribute("Value"), consPlus.Hvo.ToString(), "closed value is wrong in new feature-based NC");
	// Test adding new phonological rule
	node = updatedFxtResult.SelectSingleNode("//PhRegularRule[@Id='" + regRule.Hvo + "']");
	Assert.IsNotNull(node, "new phonological rule is missing");
	nodes = node.SelectNodes("StrucDesc/*");
	Assert.AreEqual(nodes.Count, 0);
	contentNode = node.SelectSingleNode("RightHandSides/PhSegRuleRHS/StrucChange/PhSimpleContextSeg[@dst='" + phoneme.Hvo + "']");
	Assert.IsNotNull(contentNode, "phoneme simple context missing in new phonological rule");
	contentNode = node.SelectSingleNode("RightHandSides/PhSegRuleRHS/LeftContext/PhSimpleContextNC[@dst='" + featNC.Hvo + "']");
	Assert.IsNotNull(contentNode, "NC simple context missing in new phonological rule");
	// Test adding new morphological rule
	node = updatedFxtResult.SelectSingleNode("//Lexicon/Allomorphs/MoAffixProcess[@Id='" + affRule.Hvo + "']");
	Assert.IsNotNull(node, "new morphological rule is missing");
	contentNode = node.SelectSingleNode("Input/PhSimpleContextNC[@dst='" + featNC.Hvo + "']");
	Assert.IsNotNull(contentNode, "NC simple context missing in new morphological rule");
	contentNode = node.SelectSingleNode("Output/MoCopyFromInput/Content[@dst='" + ncCtxt.Hvo + "']");
	Assert.IsNotNull(contentNode, "copy from input missing in new morphological rule");
	// Modify a phonological rule
	segCtxt = new PhSimpleContextSeg();
	regRule.StrucDescOS.Append(segCtxt);
	segCtxt.FeatureStructureRA = phoneme;
	segCtxt.NotifyNew();
	regRule.RightHandSidesOS[0].StrucChangeOS[0].DeleteUnderlyingObject();
	IPhPhonContext oldCtxt = regRule.RightHandSidesOS[0].LeftContextOA;
	Cache.LangProject.PhonologicalDataOA.ContextsOS.Append(oldCtxt);
	IPhSequenceContext seqCtxt = new PhSequenceContext();
	regRule.RightHandSidesOS[0].LeftContextOA = seqCtxt;
	seqCtxt.MembersRS.Append(oldCtxt);
	seqCtxt.NotifyNew();
	IPhSimpleContextBdry bdryCtxt = new PhSimpleContextBdry();
	Cache.LangProject.PhonologicalDataOA.ContextsOS.Append(bdryCtxt);
	bdryCtxt.FeatureStructureRAHvo = Cache.GetIdFromGuid(LangProject.kguidPhRuleWordBdry);
	bdryCtxt.NotifyNew();
	seqCtxt.MembersRS.Append(bdryCtxt);
	// Modify a morphological rule
	entryNew.LexemeFormOA = affRule;
	IMoInsertPhones insertPhones = new MoInsertPhones();
	affRule.OutputOS.InsertAt(insertPhones, 0);
	insertPhones.ContentRS.Append(phoneme);
	insertPhones.NotifyNew();
	affRule.InputOS[1].DeleteUnderlyingObject();
	// change order of a sequence vector
	lexEntry.AlternateFormsOS.InsertAt(newAffixAllomorph, 0);
	updatedFxtResult = UpdateFXT();
	// Test modifying a phonological rule
	node = updatedFxtResult.SelectSingleNode("//PhRegularRule[@Id='" + regRule.Hvo + "']");
	contentNode = node.SelectSingleNode("StrucDesc/PhSimpleContextSeg[@dst='" + phoneme.Hvo + "']");
	Assert.IsNotNull(contentNode, "phoneme simple context missing from StrucDesc in modified phonological rule");
	contentNode = node.SelectSingleNode("RightHandSides/PhSegRuleRHS/StrucChange/PhSimpleContextSeg[@dst='" + phoneme.Hvo + "']");
	Assert.IsNull(contentNode, "phoneme simple context is not missing from StrucChange in modified phonological rule");
	contentNode = node.SelectSingleNode("RightHandSides/PhSegRuleRHS/LeftContext/PhSequenceContext[@Id='" + seqCtxt.Hvo + "']");
	Assert.IsNotNull(contentNode, "sequence context missing from modified phonological rule");
	contentNode = contentNode.SelectSingleNode("Members[@dst='" + bdryCtxt.Hvo + "']");
	Assert.IsNotNull(contentNode, "boundary context missing from sequence context in modified phonological rule");
	node = updatedFxtResult.SelectSingleNode("//PhPhonData/Contexts/PhSimpleContextBdry[@Id='" + bdryCtxt.Hvo + "']");
	Assert.IsNotNull(node, "boundary context missing from contexts in phonological data");
	// Test modifying a morphological rule
	node = updatedFxtResult.SelectSingleNode("//LexEntry[@Id='" + entryNew.Hvo + "']");
	contentNode = node.SelectSingleNode("LexemeForm[@dst='" + affRule.Hvo + "']");
	Assert.IsNotNull(contentNode, "affix process rule is not the lexeme form for the lex entry");
	node = updatedFxtResult.SelectSingleNode("//Lexicon/Allomorphs/MoAffixProcess[@Id='" + affRule.Hvo + "']");
	contentNode = node.SelectSingleNode("Input/PhSimpleContextNC[@dst='" + featNC.Hvo + "']");
	Assert.IsNull(contentNode, "NC simple context was not removed from morphological rule");
	nodes = node.SelectNodes("Output/*");
	Assert.AreEqual(nodes.Count, 2, "incorrect number of mappings in morphological rule");
	contentNode = node.SelectSingleNode("Output/*[position() = 1 and @Id='" + insertPhones.Hvo + "']");
	Assert.IsNotNull(contentNode, "insert phones missing from morphological rule");
	// Test changing order of a sequence vector
	node = updatedFxtResult.SelectSingleNode("//LexEntry[@Id='" + lexEntry.Hvo + "']");
	contentNode = node.SelectSingleNode("AlternateForms[@dst='" + lexEntry.AlternateFormsOS[0].Hvo + "']/@ord");
	Assert.AreEqual("0", contentNode.InnerText);
	contentNode = node.SelectSingleNode("AlternateForms[@dst='" + lexEntry.AlternateFormsOS[1].Hvo + "']/@ord");
	Assert.AreEqual("1", contentNode.InnerText);
	contentNode = node.SelectSingleNode("AlternateForms[@dst='" + lexEntry.AlternateFormsOS[2].Hvo + "']/@ord");
	Assert.AreEqual("2", contentNode.InnerText);
}
/// -------------------------------------------------------------------------------------
/// <summary>
/// Constructor: passes the lexical database's cache to the base class and stores the
/// database itself in m_lexicon.
/// </summary>
/// <param name="lexicon">The lexical database. Must not be null (its Cache is
/// dereferenced here).</param>
/// -------------------------------------------------------------------------------------
public FlexStylesXmlAccessor(ILexDb lexicon)
	: base(lexicon.Cache)
{
	m_lexicon = lexicon;
}
/// <summary>
/// Create a new entry with the given guid owned by the given owner.
/// </summary>
/// <param name="guid">Guid for the new entry; Guid.Empty means "any guid".</param>
/// <param name="owner">Owning lexical database; must not be null.</param>
public ILexEntry Create(Guid guid, ILexDb owner)
{
	if (owner == null)
		throw new ArgumentNullException("owner");

	// No specific guid requested: delegate to the plain factory method.
	if (guid == Guid.Empty)
		return Create();

	// Reserve the next real hvo and construct the entry with the requested guid.
	int nextHvo = ((IDataReader)m_cache.ServiceLocator.GetInstance<IDataSetup>()).GetNextRealHvo();
	return new LexEntry(m_cache, nextHvo, guid);
}
/// ------------------------------------------------------------------------------------
/// <summary>
/// Seeds the complex-form and variant entry-type possibility lists of the given
/// lexical database with six standard types each, using fixed well-known guids so
/// the items can be recognized across projects. Irregular/Plural/Past variant types
/// are created as LexEntryInflType rather than plain LexEntryType.
/// </summary>
/// <param name="lexDb">The lexical database whose entry-type lists are populated</param>
/// ------------------------------------------------------------------------------------
private static void AddEntryTypes(ILexDb lexDb)
{
	var cache = lexDb.Cache;
	var servLoc = cache.ServiceLocator;
	// The IDataSetup instance also serves as the IDataReader that allocates real hvos.
	var dataReader = (IDataReader)servLoc.GetInstance<IDataSetup>();
	var tsf = cache.TsStrFactory;
	// All names/abbreviations below are created in the user (English) writing system.
	var eng = servLoc.WritingSystemManager.UserWritingSystem;
	var complexEntryTypesList = lexDb.ComplexEntryTypesOA;
	var lexEntryTypeFactory = servLoc.GetInstance<ILexEntryTypeFactory>() as ILexEntryTypeFactoryInternal;
	// --- Six standard complex-form types (Compound ... Saying). ---
	for (var i = 1; i <= 6; i++)
	{
		var guid = Guid.Empty;
		ITsString name = null;
		ITsString abbr = null;
		switch (i)
		{
			case 1: guid = new Guid("1f6ae209-141a-40db-983c-bee93af0ca3c"); name = tsf.MakeString("Compound", eng.Handle); abbr = tsf.MakeString("comp. of", eng.Handle); break;
			case 2: guid = new Guid("73266a3a-48e8-4bd7-8c84-91c730340b7d"); name = tsf.MakeString("Contraction", eng.Handle); abbr = tsf.MakeString("cont. of", eng.Handle); break;
			case 3: guid = new Guid("98c273c4-f723-4fb0-80df-eede2204dfca"); name = tsf.MakeString("Derivation", eng.Handle); abbr = tsf.MakeString("der. of", eng.Handle); break;
			case 4: guid = new Guid("b2276dec-b1a6-4d82-b121-fd114c009c59"); name = tsf.MakeString("Idiom", eng.Handle); abbr = tsf.MakeString("id. of", eng.Handle); break;
			case 5: guid = new Guid("35cee792-74c8-444e-a9b7-ed0461d4d3b7"); name = tsf.MakeString("Phrasal Verb", eng.Handle); abbr = tsf.MakeString("p.v.", eng.Handle); break;
			case 6: guid = new Guid("9466d126-246e-400b-8bba-0703e09bc567"); name = tsf.MakeString("Saying", eng.Handle); abbr = tsf.MakeString("say.", eng.Handle); break;
		}
		// Create the LexEntryType.
		lexEntryTypeFactory.Create(
			guid,
			dataReader.GetNextRealHvo(),
			complexEntryTypesList,
			i - 1, // Zero based ord.
			name, eng.Handle,
			abbr, eng.Handle);
	}
	var entryTypesList = lexDb.VariantEntryTypesOA;
	// --- Six standard variant types; three of them are inflectional variants. ---
	for (var i = 1; i <= 6; i++)
	{
		var guid = Guid.Empty;
		ITsString name = null;
		ITsString abbr = null;
		switch (i)
		{
			case 1: guid = new Guid("024b62c9-93b3-41a0-ab19-587a0030219a"); name = tsf.MakeString("Dialectal Variant", eng.Handle); abbr = tsf.MakeString("dial. var. of", eng.Handle); break;
			case 2: guid = new Guid("4343b1ef-b54f-4fa4-9998-271319a6d74c"); name = tsf.MakeString("Free Variant", eng.Handle); abbr = tsf.MakeString("fr. var. of", eng.Handle); break;
			case 3: guid = LexEntryTypeTags.kguidLexTypIrregInflectionVar; name = tsf.MakeString("Irregular Inflectional Variant", eng.Handle); abbr = tsf.MakeString("irr. inf. var. of", eng.Handle); break;
			case 4: guid = LexEntryTypeTags.kguidLexTypPluralVar; name = tsf.MakeString("Plural Variant", eng.Handle); abbr = tsf.MakeString("pl. var. of", eng.Handle); break;
			case 5: guid = LexEntryTypeTags.kguidLexTypPastVar; name = tsf.MakeString("Past Variant", eng.Handle); abbr = tsf.MakeString("pst. var. of", eng.Handle); break;
			case 6: guid = new Guid("0c4663b3-4d9a-47af-b9a1-c8565d8112ed"); name = tsf.MakeString("Spelling Variant", eng.Handle); abbr = tsf.MakeString("sp. var. of", eng.Handle); break;
		}
		// for Irregularly Inflected Variant Types, use LexEntryInflType factory
		if (guid == LexEntryTypeTags.kguidLexTypIrregInflectionVar ||
			guid == LexEntryTypeTags.kguidLexTypPluralVar ||
			guid == LexEntryTypeTags.kguidLexTypPastVar)
		{
			// Inserting the new LexEntryInflType at its ordinal position also attaches
			// it to the list, so the name/abbreviation can be set on the stored item.
			entryTypesList.PossibilitiesOS.Insert(i - 1, new LexEntryInflType(cache, dataReader.GetNextRealHvo(), guid));
			var leit = entryTypesList.PossibilitiesOS[i - 1] as ILexEntryInflType;
			leit.Name.set_String(eng.Handle, name);
			leit.Abbreviation.set_String(eng.Handle, abbr);
			// todo: ReverseAbbr
		}
		else
		{
			// Create the LexEntryType.
			lexEntryTypeFactory.Create(
				guid,
				dataReader.GetNextRealHvo(),
				entryTypesList,
				i - 1, // Zero based ord.
				name, eng.Handle,
				abbr, eng.Handle);
		}
	}
}
/// ------------------------------------------------------------------------------------
/// <summary>
/// Builds a lexical entry with one sense: the entry gets the given citation form and a
/// fresh MoStemMsa; the sense gets the given definition, the new MSA, and (optionally)
/// a semantic-domain reference.
/// </summary>
/// <param name="ld">The lexical database that will own the new entry</param>
/// <param name="cf">Citation form (vernacular default writing system)</param>
/// <param name="defn">Definition text (analysis default writing system)</param>
/// <param name="hvoDomain">Hvo of a semantic domain to attach to the sense; 0 to skip</param>
/// <returns>The newly created lexical entry</returns>
/// ------------------------------------------------------------------------------------
protected ILexEntry MakeLexEntry(ILexDb ld, string cf, string defn, int hvoDomain)
{
	ILexEntry entry = ld.EntriesOC.Add(new LexEntry());
	entry.CitationForm.VernacularDefaultWritingSystem = cf;

	ILexSense sense = entry.SensesOS.Append(new LexSense());
	sense.Definition.AnalysisDefaultWritingSystem.Text = defn;
	if (hvoDomain != 0)
		sense.SemanticDomainsRC.Add(hvoDomain);

	// Give the entry a stem MSA and point the sense at it.
	MoMorphSynAnalysis stemMsa = new MoStemMsa();
	entry.MorphoSyntaxAnalysesOC.Add(stemMsa);
	sense.MorphoSyntaxAnalysisRA = stemMsa;

	return entry;
}
/// <summary>
/// Verifies that a parse of "brubsTEST" — built from the variant form 'brub' (a variant
/// of 'believe' typed with a LexEntryInflType) plus the '-s' plural affix — produces one
/// analysis with two morph bundles, where the first bundle carries a form, an MSA, and
/// the inflection type taken from the variant entry ref.
/// </summary>
public void LexEntryInflTypeAnalysisWithNullForSlotFiller()
{
	// Expect no analyses for the wordform yet.
	IWfiWordform brubs = CheckAnalysisSize("brubsTEST", 0, true);
	ILexDb ldb = Cache.LanguageProject.LexDbOA; // NOTE(review): ldb is unused
	ParseResult result = null;
	UndoableUnitOfWorkHelper.Do("Undo stuff", "Redo stuff", m_actionHandler, () =>
	{
		// Verb brub which is a present tense irregularly inflected form of 'believe'
		// with automatically generated null Tense slot and an -s Plural Number slot filler
		// (This is not supposed to be English, in case you're wondering....)
		ILexEntryInflType presentTenseLexEntryInflType = m_lexEntryInflTypeFactory.Create();
		Cache.LangProject.LexDbOA.VariantEntryTypesOA.PossibilitiesOS.Add(presentTenseLexEntryInflType);

		// 'believe': stem entry with an alternate form, an MSA, and one sense.
		ILexEntry believeV = m_entryFactory.Create();
		IMoStemAllomorph believeVForm = m_stemAlloFactory.Create();
		believeV.AlternateFormsOS.Add(believeVForm);
		believeVForm.Form.VernacularDefaultWritingSystem =
			Cache.TsStrFactory.MakeString("believeVTEST", m_vernacularWS.Handle);
		IMoStemMsa believeVMsa = m_stemMsaFactory.Create();
		believeV.MorphoSyntaxAnalysesOC.Add(believeVMsa);
		ILexSense believeVSense = m_senseFactory.Create();
		believeV.SensesOS.Add(believeVSense);
		believeVSense.MorphoSyntaxAnalysisRA = believeVMsa;

		// 'brub': entry whose entry ref marks it as a present-tense variant of 'believe'.
		ILexEntry brubV = m_entryFactory.Create();
		IMoStemAllomorph brubVForm = m_stemAlloFactory.Create();
		brubV.AlternateFormsOS.Add(brubVForm);
		brubVForm.Form.VernacularDefaultWritingSystem =
			Cache.TsStrFactory.MakeString("brubVTEST", m_vernacularWS.Handle);
		ILexEntryRef lexEntryref = m_lexEntryRefFactory.Create();
		brubV.EntryRefsOS.Add(lexEntryref);
		lexEntryref.ComponentLexemesRS.Add(believeV);
		lexEntryref.VariantEntryTypesRS.Add(presentTenseLexEntryInflType);

		// Null present-tense affix entry (not used in the parse result below).
		ILexEntry nullPresent = m_entryFactory.Create();
		IMoAffixAllomorph nullPresentForm = m_afxAlloFactory.Create();
		nullPresent.AlternateFormsOS.Add(nullPresentForm);
		nullPresentForm.Form.VernacularDefaultWritingSystem =
			Cache.TsStrFactory.MakeString("nullPRESENTTEST", m_vernacularWS.Handle);
		IMoInflAffMsa nullPresentMsa = m_inflAffMsaFactory.Create();
		nullPresent.MorphoSyntaxAnalysesOC.Add(nullPresentMsa);

		// '-s' plural affix entry; its MSA is used as the second morph of the parse.
		ILexEntry sPlural = m_entryFactory.Create();
		IMoAffixAllomorph sPluralForm = m_afxAlloFactory.Create();
		sPlural.AlternateFormsOS.Add(sPluralForm);
		sPluralForm.Form.VernacularDefaultWritingSystem =
			Cache.TsStrFactory.MakeString("sPLURALTEST", m_vernacularWS.Handle);
		IMoInflAffMsa sPluralMsa = m_inflAffMsaFactory.Create();
		sPlural.MorphoSyntaxAnalysesOC.Add(sPluralMsa);

		// One analysis: brub (with MSA/infl type resolved through the variant ref) + -s.
		result = new ParseResult(new[]
		{
			new ParseAnalysis(new[]
			{
				new ParseMorph(brubVForm,
					MorphServices.GetMainOrFirstSenseOfVariant(brubV.EntryRefsOS[0]).MorphoSyntaxAnalysisRA,
					(ILexEntryInflType)brubV.EntryRefsOS[0].VariantEntryTypesRS[0]),
				new ParseMorph(sPluralForm, sPluralMsa)
			})
		});
	});
	m_filer.ProcessParse(brubs, ParserPriority.Low, result);
	ExecuteIdleQueue();
	// The wordform should now have exactly one analysis with two morph bundles.
	CheckAnalysisSize("brubsTEST", 1, false);
	var analysis = brubs.AnalysesOC.ElementAt(0);
	Assert.AreEqual(2, analysis.MorphBundlesOS.Count, "Expected only 2 morphs in the analysis");
	var morphBundle = analysis.MorphBundlesOS.ElementAt(0);
	Assert.IsNotNull(morphBundle.Form, "First bundle: form is not null");
	Assert.IsNotNull(morphBundle.MsaRA, "First bundle: msa is not null");
	Assert.IsNotNull(morphBundle.InflTypeRA, "First bundle: infl type is not null");
}
/// <summary>
/// Creates a lexical entry in the given database and gives it a single sense,
/// returning that sense. Guid.Empty is passed so the factories choose the guids.
/// </summary>
private ILexSense CreateLexicalEntryWithSense(ILexDb lexDb)
{
	var servLoc = Cache.ServiceLocator;
	var entry = servLoc.GetInstance<ILexEntryFactory>().Create(Guid.Empty, lexDb);
	return servLoc.GetInstance<ILexSenseFactory>().Create(Guid.Empty, entry);
}
/// <summary>
/// Makes a new lexical entry owned by <paramref name="lexDb"/> and attaches one new
/// sense to it; the sense is returned. new Guid() (= Guid.Empty) lets each factory
/// assign its own guid.
/// </summary>
private ILexSense CreateLexicalEntryWithSense(ILexDb lexDb)
{
	ILexEntry newEntry =
		Cache.ServiceLocator.GetInstance<ILexEntryFactory>().Create(new Guid(), lexDb);
	ILexSense newSense =
		Cache.ServiceLocator.GetInstance<ILexSenseFactory>().Create(new Guid(), newEntry);
	return newSense;
}
/// <summary>
/// Verifies that a parse of "crebTEST" — where 'creb' is an irregularly inflected
/// (past/plural) variant of both 'believe' and 'seek', via two separate entry refs —
/// yields two distinct analyses, each with a single morph bundle that carries a form,
/// an MSA, and an inflection type.
/// </summary>
public void LexEntryInflTypeTwoAnalyses()
{
	// Expect no analyses for the wordform yet.
	IWfiWordform creb = CheckAnalysisSize("crebTEST", 0, true);
	ParseResult result = null;
	UndoableUnitOfWorkHelper.Do("Undo stuff", "Redo stuff", m_actionHandler, () =>
	{
		// Verb creb which is a past tense, plural irregularly inflected form of 'believe' and also 'seek'
		// with automatically generated null Tense slot and an automatically generated null Number slot filler
		// (This is not supposed to be English, in case you're wondering....)
		ILexEntryInflType pastTenseLexEntryInflType = m_lexEntryInflTypeFactory.Create();
		ILexEntryInflType pluralTenseLexEntryInflType = m_lexEntryInflTypeFactory.Create();
		Cache.LangProject.LexDbOA.VariantEntryTypesOA.PossibilitiesOS.Add(pastTenseLexEntryInflType);
		Cache.LangProject.LexDbOA.VariantEntryTypesOA.PossibilitiesOS.Add(pluralTenseLexEntryInflType);

		// 'believe': stem entry with an alternate form, an MSA, and one sense.
		ILexEntry believeV = m_entryFactory.Create();
		IMoStemAllomorph believeVForm = m_stemAlloFactory.Create();
		believeV.AlternateFormsOS.Add(believeVForm);
		believeVForm.Form.VernacularDefaultWritingSystem =
			TsStringUtils.MakeString("believeVTEST", m_vernacularWS.Handle);
		IMoStemMsa believeVMsa = m_stemMsaFactory.Create();
		believeV.MorphoSyntaxAnalysesOC.Add(believeVMsa);
		ILexSense believeVSense = m_senseFactory.Create();
		believeV.SensesOS.Add(believeVSense);
		believeVSense.MorphoSyntaxAnalysisRA = believeVMsa;

		// 'seek': a second stem entry set up the same way as 'believe'.
		ILexEntry seekV = m_entryFactory.Create();
		IMoStemAllomorph seekVForm = m_stemAlloFactory.Create();
		// FIX: the allomorph belongs to seekV; it was previously added to
		// believeV (copy-and-paste slip), leaving seekV with no alternate form.
		seekV.AlternateFormsOS.Add(seekVForm);
		seekVForm.Form.VernacularDefaultWritingSystem =
			TsStringUtils.MakeString("seekVTEST", m_vernacularWS.Handle);
		IMoStemMsa seekVMsa = m_stemMsaFactory.Create();
		seekV.MorphoSyntaxAnalysesOC.Add(seekVMsa);
		ILexSense seekVSense = m_senseFactory.Create();
		seekV.SensesOS.Add(seekVSense);
		seekVSense.MorphoSyntaxAnalysisRA = seekVMsa;

		// 'creb': one entry with two entry refs, one per component lexeme, each
		// typed as both past and plural inflectional variant.
		ILexEntry crebV = m_entryFactory.Create();
		IMoStemAllomorph crebVForm = m_stemAlloFactory.Create();
		crebV.AlternateFormsOS.Add(crebVForm);
		crebVForm.Form.VernacularDefaultWritingSystem =
			TsStringUtils.MakeString("crebVTEST", m_vernacularWS.Handle);
		ILexEntryRef lexEntryref = m_lexEntryRefFactory.Create();
		crebV.EntryRefsOS.Add(lexEntryref);
		lexEntryref.ComponentLexemesRS.Add(believeV);
		lexEntryref.VariantEntryTypesRS.Add(pastTenseLexEntryInflType);
		lexEntryref.VariantEntryTypesRS.Add(pluralTenseLexEntryInflType);
		lexEntryref = m_lexEntryRefFactory.Create();
		crebV.EntryRefsOS.Add(lexEntryref);
		lexEntryref.ComponentLexemesRS.Add(seekV);
		lexEntryref.VariantEntryTypesRS.Add(pastTenseLexEntryInflType);
		lexEntryref.VariantEntryTypesRS.Add(pluralTenseLexEntryInflType);

		// Null past-tense affix entry (not used in the parse result below).
		ILexEntry nullPast = m_entryFactory.Create();
		IMoAffixAllomorph nullPastForm = m_afxAlloFactory.Create();
		nullPast.AlternateFormsOS.Add(nullPastForm);
		nullPastForm.Form.VernacularDefaultWritingSystem =
			TsStringUtils.MakeString("nullPASTTEST", m_vernacularWS.Handle);
		IMoInflAffMsa nullPastMsa = m_inflAffMsaFactory.Create();
		nullPast.MorphoSyntaxAnalysesOC.Add(nullPastMsa);

		// Null plural affix entry (not used in the parse result below).
		ILexEntry nullPlural = m_entryFactory.Create();
		IMoAffixAllomorph nullPluralForm = m_afxAlloFactory.Create();
		nullPlural.AlternateFormsOS.Add(nullPluralForm);
		nullPluralForm.Form.VernacularDefaultWritingSystem =
			TsStringUtils.MakeString("nullPLURALTEST", m_vernacularWS.Handle);
		IMoInflAffMsa nullPluralMsa = m_inflAffMsaFactory.Create();
		nullPlural.MorphoSyntaxAnalysesOC.Add(nullPluralMsa);

		// Two analyses for the same surface form: one resolved through the seek
		// entry ref (index 1), one through the believe entry ref (index 0).
		result = new ParseResult(new[]
		{
			new ParseAnalysis(new[]
			{
				new ParseMorph(crebVForm,
					MorphServices.GetMainOrFirstSenseOfVariant(crebV.EntryRefsOS[1]).MorphoSyntaxAnalysisRA,
					(ILexEntryInflType)crebV.EntryRefsOS[1].VariantEntryTypesRS[0])
			}),
			new ParseAnalysis(new[]
			{
				new ParseMorph(crebVForm,
					MorphServices.GetMainOrFirstSenseOfVariant(crebV.EntryRefsOS[0]).MorphoSyntaxAnalysisRA,
					(ILexEntryInflType)crebV.EntryRefsOS[0].VariantEntryTypesRS[0])
			})
		});
	});
	m_filer.ProcessParse(creb, ParserPriority.Low, result);
	ExecuteIdleQueue();
	// Both analyses should have been recorded, each with exactly one morph bundle.
	CheckAnalysisSize("crebTEST", 2, false);
	foreach (var analysis in creb.AnalysesOC)
	{
		Assert.AreEqual(1, analysis.MorphBundlesOS.Count, "Expected only 1 morph in the analysis");
		var morphBundle = analysis.MorphBundlesOS.ElementAt(0);
		Assert.IsNotNull(morphBundle.Form, "First bundle: form is not null");
		Assert.IsNotNull(morphBundle.MsaRA, "First bundle: msa is not null");
		Assert.IsNotNull(morphBundle.InflTypeRA, "First bundle: infl type is not null");
	}
}
/// <summary>
/// Creates a lexical entry in the given lexical database. Guid.Empty (same value
/// as new Guid()) is passed so the factory assigns the guid itself.
/// </summary>
private ILexEntry CreateLexicalEntry(ILexDb lexDb)
{
	var entryFactory = Cache.ServiceLocator.GetInstance<ILexEntryFactory>();
	return entryFactory.Create(Guid.Empty, lexDb);
}
/// ------------------------------------------------------------------------------------
/// <summary>
/// Caches the test project's lexical database in m_ldb so individual tests can use
/// it without re-navigating from the language project each time.
/// </summary>
/// ------------------------------------------------------------------------------------
protected override void CreateTestData()
{
	m_ldb = Cache.LangProject.LexDbOA;
}
/// ------------------------------------------------------------------------------------
/// <summary>
/// Seeds the complex-form and variant entry-type possibility lists of the given
/// lexical database with seven standard types each (including an "Unspecified" item
/// at ordinal 0), using fixed well-known guids. The Irregular/Plural/Past variant
/// types are created as LexEntryInflType rather than plain LexEntryType.
/// </summary>
/// <param name="lexDb">The lexical database whose entry-type lists are populated</param>
/// ------------------------------------------------------------------------------------
private static void AddEntryTypes(ILexDb lexDb)
{
	var cache = lexDb.Cache;
	var servLoc = cache.ServiceLocator;
	// The IDataSetup instance also serves as the IDataReader that allocates real hvos.
	var dataReader = (IDataReader)servLoc.GetInstance<IDataSetup>();
	var tsf = cache.TsStrFactory;
	// All names/abbreviations below are created in the user (English) writing system.
	var eng = servLoc.WritingSystemManager.UserWritingSystem;
	var complexEntryTypesList = lexDb.ComplexEntryTypesOA;
	var lexEntryTypeFactory = (ILexEntryTypeFactoryInternal)servLoc.GetInstance<ILexEntryTypeFactory>();
	// --- Seven standard complex-form types (Unspecified, Compound ... Saying). ---
	for (var i = 0; i <= 6; i++)
	{
		var guid = Guid.Empty;
		ITsString name = null;
		ITsString abbr = null;
		switch (i)
		{
			case 0: guid = new Guid("fec038ed-6a8c-4fa5-bc96-a4f515a98c50"); name = tsf.MakeString("Unspecified Complex Form", eng.Handle); abbr = tsf.MakeString("unspec. comp. form of", eng.Handle); break;
			case 1: guid = new Guid("1f6ae209-141a-40db-983c-bee93af0ca3c"); name = tsf.MakeString("Compound", eng.Handle); abbr = tsf.MakeString("comp. of", eng.Handle); break;
			case 2: guid = new Guid("73266a3a-48e8-4bd7-8c84-91c730340b7d"); name = tsf.MakeString("Contraction", eng.Handle); abbr = tsf.MakeString("cont. of", eng.Handle); break;
			case 3: guid = new Guid("98c273c4-f723-4fb0-80df-eede2204dfca"); name = tsf.MakeString("Derivation", eng.Handle); abbr = tsf.MakeString("der. of", eng.Handle); break;
			case 4: guid = new Guid("b2276dec-b1a6-4d82-b121-fd114c009c59"); name = tsf.MakeString("Idiom", eng.Handle); abbr = tsf.MakeString("id. of", eng.Handle); break;
			case 5: guid = new Guid("35cee792-74c8-444e-a9b7-ed0461d4d3b7"); name = tsf.MakeString("Phrasal Verb", eng.Handle); abbr = tsf.MakeString("p.v.", eng.Handle); break;
			case 6: guid = new Guid("9466d126-246e-400b-8bba-0703e09bc567"); name = tsf.MakeString("Saying", eng.Handle); abbr = tsf.MakeString("say.", eng.Handle); break;
		}
		// Create the LexEntryType.
		lexEntryTypeFactory.Create(
			guid,
			dataReader.GetNextRealHvo(),
			complexEntryTypesList,
			i, // Zero based ord.
			name, eng.Handle,
			abbr, eng.Handle);
	}
	var variantEntryTypesList = lexDb.VariantEntryTypesOA;
	// --- Seven standard variant types; three of them are inflectional variants. ---
	for (var i = 0; i <= 6; i++)
	{
		var guid = Guid.Empty;
		ITsString name = null;
		ITsString abbr = null;
		switch (i)
		{
			case 0: guid = new Guid("3942addb-99fd-43e9-ab7d-99025ceb0d4e"); name = tsf.MakeString("Unspecified Variant", eng.Handle); abbr = tsf.MakeString("unspec. var. of", eng.Handle); break;
			case 1: guid = new Guid("024b62c9-93b3-41a0-ab19-587a0030219a"); name = tsf.MakeString("Dialectal Variant", eng.Handle); abbr = tsf.MakeString("dial. var. of", eng.Handle); break;
			case 2: guid = new Guid("4343b1ef-b54f-4fa4-9998-271319a6d74c"); name = tsf.MakeString("Free Variant", eng.Handle); abbr = tsf.MakeString("fr. var. of", eng.Handle); break;
			case 3: guid = LexEntryTypeTags.kguidLexTypIrregInflectionVar; name = tsf.MakeString("Irregular Inflectional Variant", eng.Handle); abbr = tsf.MakeString("irr. inf. var. of", eng.Handle); break;
			case 4: guid = LexEntryTypeTags.kguidLexTypPluralVar; name = tsf.MakeString("Plural Variant", eng.Handle); abbr = tsf.MakeString("pl. var. of", eng.Handle); break;
			case 5: guid = LexEntryTypeTags.kguidLexTypPastVar; name = tsf.MakeString("Past Variant", eng.Handle); abbr = tsf.MakeString("pst. var. of", eng.Handle); break;
			case 6: guid = new Guid("0c4663b3-4d9a-47af-b9a1-c8565d8112ed"); name = tsf.MakeString("Spelling Variant", eng.Handle); abbr = tsf.MakeString("sp. var. of", eng.Handle); break;
		}
		// for Irregularly Inflected Variant Types, use LexEntryInflType factory
		if (guid == LexEntryTypeTags.kguidLexTypIrregInflectionVar ||
			guid == LexEntryTypeTags.kguidLexTypPluralVar ||
			guid == LexEntryTypeTags.kguidLexTypPastVar)
		{
			// Inserting the new LexEntryInflType at its ordinal position also attaches
			// it to the list, so the name/abbreviation can be set on the stored item.
			variantEntryTypesList.PossibilitiesOS.Insert(i, new LexEntryInflType(cache, dataReader.GetNextRealHvo(), guid));
			var leit = (ILexEntryInflType)variantEntryTypesList.PossibilitiesOS[i];
			leit.Name.set_String(eng.Handle, name);
			leit.Abbreviation.set_String(eng.Handle, abbr);
			// todo: ReverseAbbr
		}
		else
		{
			// Create the LexEntryType.
			lexEntryTypeFactory.Create(
				guid,
				dataReader.GetNextRealHvo(),
				variantEntryTypesList,
				i, // Zero based ord.
				name, eng.Handle,
				abbr, eng.Handle);
		}
	}
}
/// ------------------------------------------------------------------------------------
/// <summary>
/// Creates the morph-types possibility list for the given lexical database and
/// populates it with the 19 standard morph types (bound root, prefix, suffix, clitic,
/// etc.), each with a fixed well-known guid, display name, abbreviation, optional
/// leading/trailing marker characters, and a secondary sort order.
/// </summary>
/// <param name="lexDb">The lexical database whose morph-types list is created</param>
/// ------------------------------------------------------------------------------------
private static void AddMorphTypes(ILexDb lexDb)
{
	var cache = lexDb.Cache;
	var servLoc = cache.ServiceLocator;
	// The IDataSetup instance also serves as the IDataReader that allocates real hvos.
	var dataReader = (IDataReader)servLoc.GetInstance<IDataSetup>();
	// NOTE(review): despite the name, this is the morph-type factory, not an entry-type factory.
	var lexEntryTypeFactory = servLoc.GetInstance<IMoMorphTypeFactory>() as IMoMorphTypeFactoryInternal;
	var tsf = cache.TsStrFactory;
	// All names/abbreviations below are created in the user (English) writing system.
	var eng = servLoc.WritingSystemManager.UserWritingSystem;
	// Build a fresh possibility list holding MoMorphType items.
	var morphTypesList = servLoc.GetInstance<ICmPossibilityListFactory>().Create();
	lexDb.MorphTypesOA = morphTypesList;
	lexDb.MorphTypesOA.ItemClsid = MoMorphTypeTags.kClassId;
	// 'prefix'/'postfix' here are the marker characters written before/after the form
	// (e.g. a suffix gets a leading "-"); presumably display markers — TODO confirm.
	for (var i = 1; i <= 19; i++)
	{
		var guid = Guid.Empty;
		string prefix = null;
		string postfix = null;
		var secondaryOrder = 0;
		ITsString name = null;
		ITsString abbr = null;
		switch (i)
		{
			case 1: guid = new Guid("d7f713e4-e8cf-11d3-9764-00c04f186933"); prefix = "*"; secondaryOrder = 2; name = tsf.MakeString("bound root", eng.Handle); abbr = tsf.MakeString("bd root", eng.Handle); break;
			case 2: guid = new Guid("d7f713e7-e8cf-11d3-9764-00c04f186933"); prefix = "*"; secondaryOrder = 2; name = tsf.MakeString("bound stem", eng.Handle); abbr = tsf.MakeString("bd stem", eng.Handle); break;
			case 3: guid = new Guid("d7f713df-e8cf-11d3-9764-00c04f186933"); secondaryOrder = 1; name = tsf.MakeString("circumfix", eng.Handle); abbr = tsf.MakeString("cfx", eng.Handle); break;
			case 4: guid = new Guid("d7f713e1-e8cf-11d3-9764-00c04f186933"); prefix = "="; secondaryOrder = 7; name = tsf.MakeString("enclitic", eng.Handle); abbr = tsf.MakeString("enclit", eng.Handle); break;
			case 5: guid = new Guid("d7f713da-e8cf-11d3-9764-00c04f186933"); prefix = "-"; postfix = "-"; secondaryOrder = 5; name = tsf.MakeString("infix", eng.Handle); abbr = tsf.MakeString("ifx", eng.Handle); break;
			case 6: guid = new Guid("56db04bf-3d58-44cc-b292-4c8aa68538f4"); secondaryOrder = 1; name = tsf.MakeString("particle", eng.Handle); abbr = tsf.MakeString("part", eng.Handle); break;
			case 7: guid = new Guid("d7f713db-e8cf-11d3-9764-00c04f186933"); postfix = "-"; secondaryOrder = 3; name = tsf.MakeString("prefix", eng.Handle); abbr = tsf.MakeString("pfx", eng.Handle); break;
			case 8: guid = new Guid("d7f713e2-e8cf-11d3-9764-00c04f186933"); postfix = "="; secondaryOrder = 4; name = tsf.MakeString("proclitic", eng.Handle); abbr = tsf.MakeString("proclit", eng.Handle); break;
			case 9: guid = new Guid("d7f713e5-e8cf-11d3-9764-00c04f186933"); secondaryOrder = 1; name = tsf.MakeString("root", eng.Handle); abbr = tsf.MakeString("ubd root", eng.Handle); break;
			case 10: guid = new Guid("d7f713dc-e8cf-11d3-9764-00c04f186933"); prefix = "="; postfix = "="; secondaryOrder = 5; name = tsf.MakeString("simulfix", eng.Handle); abbr = tsf.MakeString("smfx", eng.Handle); break;
			case 11: guid = new Guid("d7f713e8-e8cf-11d3-9764-00c04f186933"); secondaryOrder = 1; name = tsf.MakeString("stem", eng.Handle); abbr = tsf.MakeString("ubd stem", eng.Handle); break;
			case 12: guid = new Guid("d7f713dd-e8cf-11d3-9764-00c04f186933"); prefix = "-"; secondaryOrder = 6; name = tsf.MakeString("suffix", eng.Handle); abbr = tsf.MakeString("sfx", eng.Handle); break;
			case 13: guid = new Guid("d7f713de-e8cf-11d3-9764-00c04f186933"); prefix = "~"; postfix = "~"; secondaryOrder = 5; name = tsf.MakeString("suprafix", eng.Handle); abbr = tsf.MakeString("spfx", eng.Handle); break;
			case 14: guid = new Guid("18d9b1c3-b5b6-4c07-b92c-2fe1d2281bd4"); prefix = "-"; postfix = "-"; name = tsf.MakeString("infixing interfix", eng.Handle); abbr = tsf.MakeString("ifxnfx", eng.Handle); break;
			case 15: guid = new Guid("af6537b0-7175-4387-ba6a-36547d37fb13"); postfix = "-"; name = tsf.MakeString("prefixing interfix", eng.Handle); abbr = tsf.MakeString("pfxnfx", eng.Handle); break;
			case 16: guid = new Guid("3433683d-08a9-4bae-ae53-2a7798f64068"); prefix = "-"; name = tsf.MakeString("suffixing interfix", eng.Handle); abbr = tsf.MakeString("sfxnfx", eng.Handle); break;
			case 17: guid = new Guid("a23b6faa-1052-4f4d-984b-4b338bdaf95f"); name = tsf.MakeString("phrase", eng.Handle); abbr = tsf.MakeString("phr", eng.Handle); break;
			case 18: guid = new Guid("0cc8c35a-cee9-434d-be58-5d29130fba5b"); name = tsf.MakeString("discontiguous phrase", eng.Handle); abbr = tsf.MakeString("dis phr", eng.Handle); break;
			case 19: guid = new Guid("c2d140e5-7ca9-41f4-a69a-22fc7049dd2c"); name = tsf.MakeString("clitic", eng.Handle); abbr = tsf.MakeString("c**t", eng.Handle); break;
		}
		// Create the MoMorphType.
		lexEntryTypeFactory.Create(
			guid,
			dataReader.GetNextRealHvo(),
			morphTypesList,
			i - 1, // Zero based ord.
			name, eng.Handle,
			abbr, eng.Handle,
			prefix,
			postfix,
			secondaryOrder);
	}
}
/// ------------------------------------------------------------------------------------
/// <summary>
/// Builds the morph-types possibility list on the given lexical database, filling it
/// with the 19 standard morph types, each identified by a fixed guid and carrying a
/// name, an abbreviation, optional marker characters, and a secondary sort order.
/// </summary>
/// <param name="lexDb">The lexical database that receives the morph-types list</param>
/// ------------------------------------------------------------------------------------
private static void AddMorphTypes(ILexDb lexDb)
{
	var cache = lexDb.Cache;
	var servLoc = cache.ServiceLocator;
	// The IDataSetup instance also serves as the IDataReader that allocates real hvos.
	var dataReader = (IDataReader)servLoc.GetInstance<IDataSetup>();
	// NOTE(review): misleading name — this is actually the MoMorphType factory.
	var lexEntryTypeFactory = servLoc.GetInstance<IMoMorphTypeFactory>() as IMoMorphTypeFactoryInternal;
	var tsf = cache.TsStrFactory;
	// Names/abbreviations are created in the user (English) writing system.
	var eng = servLoc.WritingSystemManager.UserWritingSystem;
	// Create a new possibility list whose items are MoMorphType instances.
	var morphTypesList = servLoc.GetInstance<ICmPossibilityListFactory>().Create();
	lexDb.MorphTypesOA = morphTypesList;
	lexDb.MorphTypesOA.ItemClsid = MoMorphTypeTags.kClassId;
	// 'prefix'/'postfix' are the marker characters written before/after the form
	// (e.g. a suffix gets a leading "-"); presumably display markers — TODO confirm.
	for (var i = 1; i <= 19; i++)
	{
		var guid = Guid.Empty;
		string prefix = null;
		string postfix = null;
		var secondaryOrder = 0;
		ITsString name = null;
		ITsString abbr = null;
		switch (i)
		{
			case 1: guid = new Guid("d7f713e4-e8cf-11d3-9764-00c04f186933"); prefix = "*"; secondaryOrder = 2; name = tsf.MakeString("bound root", eng.Handle); abbr = tsf.MakeString("bd root", eng.Handle); break;
			case 2: guid = new Guid("d7f713e7-e8cf-11d3-9764-00c04f186933"); prefix = "*"; secondaryOrder = 2; name = tsf.MakeString("bound stem", eng.Handle); abbr = tsf.MakeString("bd stem", eng.Handle); break;
			case 3: guid = new Guid("d7f713df-e8cf-11d3-9764-00c04f186933"); secondaryOrder = 1; name = tsf.MakeString("circumfix", eng.Handle); abbr = tsf.MakeString("cfx", eng.Handle); break;
			case 4: guid = new Guid("d7f713e1-e8cf-11d3-9764-00c04f186933"); prefix = "="; secondaryOrder = 7; name = tsf.MakeString("enclitic", eng.Handle); abbr = tsf.MakeString("enclit", eng.Handle); break;
			case 5: guid = new Guid("d7f713da-e8cf-11d3-9764-00c04f186933"); prefix = "-"; postfix = "-"; secondaryOrder = 5; name = tsf.MakeString("infix", eng.Handle); abbr = tsf.MakeString("ifx", eng.Handle); break;
			case 6: guid = new Guid("56db04bf-3d58-44cc-b292-4c8aa68538f4"); secondaryOrder = 1; name = tsf.MakeString("particle", eng.Handle); abbr = tsf.MakeString("part", eng.Handle); break;
			case 7: guid = new Guid("d7f713db-e8cf-11d3-9764-00c04f186933"); postfix = "-"; secondaryOrder = 3; name = tsf.MakeString("prefix", eng.Handle); abbr = tsf.MakeString("pfx", eng.Handle); break;
			case 8: guid = new Guid("d7f713e2-e8cf-11d3-9764-00c04f186933"); postfix = "="; secondaryOrder = 4; name = tsf.MakeString("proclitic", eng.Handle); abbr = tsf.MakeString("proclit", eng.Handle); break;
			case 9: guid = new Guid("d7f713e5-e8cf-11d3-9764-00c04f186933"); secondaryOrder = 1; name = tsf.MakeString("root", eng.Handle); abbr = tsf.MakeString("ubd root", eng.Handle); break;
			case 10: guid = new Guid("d7f713dc-e8cf-11d3-9764-00c04f186933"); prefix = "="; postfix = "="; secondaryOrder = 5; name = tsf.MakeString("simulfix", eng.Handle); abbr = tsf.MakeString("smfx", eng.Handle); break;
			case 11: guid = new Guid("d7f713e8-e8cf-11d3-9764-00c04f186933"); secondaryOrder = 1; name = tsf.MakeString("stem", eng.Handle); abbr = tsf.MakeString("ubd stem", eng.Handle); break;
			case 12: guid = new Guid("d7f713dd-e8cf-11d3-9764-00c04f186933"); prefix = "-"; secondaryOrder = 6; name = tsf.MakeString("suffix", eng.Handle); abbr = tsf.MakeString("sfx", eng.Handle); break;
			case 13: guid = new Guid("d7f713de-e8cf-11d3-9764-00c04f186933"); prefix = "~"; postfix = "~"; secondaryOrder = 5; name = tsf.MakeString("suprafix", eng.Handle); abbr = tsf.MakeString("spfx", eng.Handle); break;
			case 14: guid = new Guid("18d9b1c3-b5b6-4c07-b92c-2fe1d2281bd4"); prefix = "-"; postfix = "-"; name = tsf.MakeString("infixing interfix", eng.Handle); abbr = tsf.MakeString("ifxnfx", eng.Handle); break;
			case 15: guid = new Guid("af6537b0-7175-4387-ba6a-36547d37fb13"); postfix = "-"; name = tsf.MakeString("prefixing interfix", eng.Handle); abbr = tsf.MakeString("pfxnfx", eng.Handle); break;
			case 16: guid = new Guid("3433683d-08a9-4bae-ae53-2a7798f64068"); prefix = "-"; name = tsf.MakeString("suffixing interfix", eng.Handle); abbr = tsf.MakeString("sfxnfx", eng.Handle); break;
			case 17: guid = new Guid("a23b6faa-1052-4f4d-984b-4b338bdaf95f"); name = tsf.MakeString("phrase", eng.Handle); abbr = tsf.MakeString("phr", eng.Handle); break;
			case 18: guid = new Guid("0cc8c35a-cee9-434d-be58-5d29130fba5b"); name = tsf.MakeString("discontiguous phrase", eng.Handle); abbr = tsf.MakeString("dis phr", eng.Handle); break;
			case 19: guid = new Guid("c2d140e5-7ca9-41f4-a69a-22fc7049dd2c"); name = tsf.MakeString("clitic", eng.Handle); abbr = tsf.MakeString("c**t", eng.Handle); break;
		}
		// Create the MoMorphType.
		lexEntryTypeFactory.Create(
			guid,
			dataReader.GetNextRealHvo(),
			morphTypesList,
			i - 1, // Zero based ord.
			name, eng.Handle,
			abbr, eng.Handle,
			prefix,
			postfix,
			secondaryOrder);
	}
}
/// <summary>
/// Builds a new lexical entry owned by <paramref name="lexDb"/>. Passing new Guid()
/// (i.e. Guid.Empty) lets the factory pick a fresh guid for the entry.
/// </summary>
private ILexEntry CreateLexicalEntry(ILexDb lexDb)
{
	ILexEntry created = Cache.ServiceLocator
		.GetInstance<ILexEntryFactory>()
		.Create(new Guid(), lexDb);
	return created;
}