/// <summary>
/// Creates an empty lemmatizer that uses the supplied settings. The decision
/// trees are left unbuilt (null) until <c>BuildModel</c> is invoked.
/// </summary>
/// <param name="lsett">Settings controlling example handling and tree building.</param>
public Lemmatizer(LemmatizerSettings lsett)
{
    Lsett = lsett;
    ElExamples = new ExampleList(lsett);
    LtnRootNode = null;
    LtnRootNodeFront = null;
}
/// <summary>
/// Builds the subtree for the group of examples with indices [iStart, iEnd]:
/// derives this node's suffix condition from the group, selects the best
/// rule(s), recursively builds child nodes, then collapses redundant
/// single-grandchild children into direct children.
/// </summary>
/// <param name="lsett">Lemmatizer settings controlling tree construction.</param>
/// <param name="elExamples">Example list shared by the whole tree.</param>
/// <param name="iStart">Index of the first word of the current group</param>
/// <param name="iEnd">Index of the last word of the current group</param>
/// <param name="ltnParentNode">Parent node, or null for the root.</param>
private LemmaTreeNode(LemmatizerSettings lsett, ExampleList elExamples, int iStart, int iEnd, LemmaTreeNode ltnParentNode)
    : this(lsett)
{
    this.ltnParentNode = ltnParentNode;
    this.dictSubNodes = null;
    this.iStart = iStart;
    this.iEnd = iEnd;
    this.elExamples = elExamples;

    // Empty or out-of-range group: fall back to the default rule with zero weight.
    if (iStart >= elExamples.Count || iEnd >= elExamples.Count || iStart > iEnd)
    {
        lrBestRule = elExamples.Rules.DefaultRule;
        aBestRules = new RuleWeighted[1];
        aBestRules[0] = new RuleWeighted(lrBestRule, 0);
        dWeight = 0;
        return;
    }

    // This node's condition is one character longer than the parent's shared
    // suffix, capped by the length of the group's first word (root: length 0).
    int iConditionLength = Math.Min(ltnParentNode == null ? 0 : ltnParentNode.iSimilarity + 1, elExamples[iStart].Word.Length);
    this.sCondition = elExamples[iStart].Word.Substring(elExamples[iStart].Word.Length - iConditionLength);
    // Similarity between the group's first and last example — presumably the
    // shared-suffix length, assuming elExamples is sorted so that the first and
    // last example bound the group; TODO confirm ExampleList ordering.
    this.iSimilarity = elExamples[iStart].Similarity(elExamples[iEnd]);
    // Marks nodes whose condition consumes the last example's entire word.
    this.bWholeWord = ltnParentNode == null ?
    false : elExamples[iEnd].Word.Length == ltnParentNode.iSimilarity;

    FindBestRules();
    AddSubAll();

    //TODO check this heuristics, can be problematic when there are more applicable rules
    // Collapse step: a child with exactly one grandchild that predicts the same
    // best rule as this node is replaced by that grandchild. Replacements are
    // collected first so the dictionary is not modified while enumerating.
    if (dictSubNodes != null)
    {
        List<KeyValuePair<char, LemmaTreeNode>> lReplaceNodes = new List<KeyValuePair<char, LemmaTreeNode>>();
        foreach (KeyValuePair<char, LemmaTreeNode> kvpChild in dictSubNodes)
        {
            if (kvpChild.Value.dictSubNodes != null && kvpChild.Value.dictSubNodes.Count == 1)
            {
                // Grab the single grandchild via its enumerator.
                // NOTE(review): the enumerator is never disposed; harmless for
                // dictionary value enumerators, but worth confirming.
                IEnumerator<LemmaTreeNode> enumChildChild = kvpChild.Value.dictSubNodes.Values.GetEnumerator();
                enumChildChild.MoveNext();
                LemmaTreeNode ltrChildChild = enumChildChild.Current;
                if (kvpChild.Value.lrBestRule == lrBestRule)
                {
                    lReplaceNodes.Add(new KeyValuePair<char, LemmaTreeNode>(kvpChild.Key, ltrChildChild));
                }
            }
        }
        // Apply the collected replacements and re-parent the promoted nodes.
        foreach (KeyValuePair<char, LemmaTreeNode> kvpChild in lReplaceNodes)
        {
            dictSubNodes[kvpChild.Key] = kvpChild.Value;
            kvpChild.Value.ltnParentNode = this;
        }
    }
}
/// <summary>
/// Restores the lemmatizer from a Latino binary stream: settings and examples
/// first, followed by a single rear tree, or by both rear and front trees when
/// front lemmatization is enabled in the restored settings.
/// </summary>
/// <param name="binRead">Source stream positioned at serialized lemmatizer data.</param>
public void Load(Latino.BinarySerializer binRead)
{
    lsett = new LemmatizerSettings(binRead);
    elExamples = new ExampleList(binRead, lsett);

    if (lsett.bBuildFrontLemmatizer)
    {
        // Two trees were serialized: rear first, then front.
        ltnRootNode = new LemmaTreeNode(binRead, lsett, elExamples.GetFrontRearExampleList(false), null);
        ltnRootNodeFront = new LemmaTreeNode(binRead, lsett, elExamples.GetFrontRearExampleList(true), null);
    }
    else
    {
        ltnRootNode = new LemmaTreeNode(binRead, lsett, elExamples, null);
    }
}
/// <summary>
/// Restores the lemmatizer from a binary stream written by the matching
/// serializer: settings, examples (or separately stored front/rear lists),
/// the decision tree(s), and - in newer files - a list of exceptions.
/// The reader is disposed when this method returns.
/// </summary>
/// <param name="binRead">Reader positioned at serialized lemmatizer data; disposed by this call.</param>
public void Deserialize(BinaryReader binRead)
{
    using (binRead)
    {
        // settings
        Lsett = new LemmatizerSettings(binRead);

        // examples
        bool bSerializeExamples = binRead.ReadBoolean();
        ElExamples = new ExampleList(binRead, Lsett);
        ExampleList elExamplesRear;
        ExampleList elExamplesFront;
        if (bSerializeExamples)
        {
            // Full example list was stored: derive front/rear views from it.
            elExamplesRear = ElExamples.GetFrontRearExampleList(false);
            elExamplesFront = ElExamples.GetFrontRearExampleList(true);
        }
        else
        {
            // Front/rear lists were stored separately in the stream.
            elExamplesRear = new ExampleList(binRead, Lsett);
            elExamplesFront = new ExampleList(binRead, Lsett);
        }

        // root node
        LtnRootNode = new LemmaTreeNode(binRead, Lsett, Lsett.bBuildFrontLemmatizer ? elExamplesRear : ElExamples, null);
        // root node front (only serialized when front lemmatization is enabled)
        if (Lsett.bBuildFrontLemmatizer)
        {
            LtnRootNodeFront = new LemmaTreeNode(binRead, Lsett, elExamplesFront, null);
        }

        // exceptions - use try catch for retro compatibility
        // --> this section is missing in the old lemmatizer files
        try
        {
            var nbOfExceptions = binRead.ReadInt32();
            for (var i = 0; i < nbOfExceptions; i++)
            {
                // Each exception is stored as a single "word lemma" string.
                var exception = binRead.ReadString();
                var parts = exception.Split(' ');
                this.AddException(parts[0], parts[1]);
            }
        }
        catch (Exception)
        {
            // Old files end before this section; swallowing the read failure
            // here is the intended backwards-compatible behavior.
            Trace.WriteLine("Couldn't deserialize exceptions in Lemmatizer file");
        }
    }
}
/// <summary>
/// Builds a child node for the example range [iStart, iEnd] and registers it
/// under the given character key, unless the child would be redundant.
/// </summary>
/// <param name="iStart">Index of the first example of the child's group.</param>
/// <param name="iEnd">Index of the last example of the child's group.</param>
/// <param name="chChar">Character that selects this child from the current node.</param>
private void AddSub(int iStart, int iEnd, char chChar)
{
    var ltnSub = new LemmaTreeNode(lsett, elExamples, iStart, iEnd, this);

    // A leaf child predicting the same rule as this node adds no information,
    // so it is pruned. TODO: may not be appropriate, since statistics from
    // multiple applicable rules are lost.
    bool bRedundantLeaf = ltnSub.dictSubNodes == null && ltnSub.lrBestRule == lrBestRule;
    if (bRedundantLeaf)
    {
        return;
    }

    // Child dictionary is created lazily on first insertion.
    if (dictSubNodes == null)
    {
        dictSubNodes = new Dictionary<char, LemmaTreeNode>();
    }
    dictSubNodes.Add(chChar, ltnSub);
}
/// <summary>
/// Restores this node - and, recursively, its whole subtree - from a binary
/// stream. The read order must exactly mirror the corresponding serializer.
/// </summary>
/// <param name="binRead">Reader positioned at this node's serialized data.</param>
/// <param name="lsett">Shared lemmatizer settings.</param>
/// <param name="elExamples">Shared example list; rules are resolved by their string signature against it.</param>
/// <param name="ltnParentNode">Parent node, or null for the root.</param>
public void Deserialize(BinaryReader binRead, LemmatizerSettings lsett, ExampleList elExamples, LemmaTreeNode ltnParentNode)
{
    this.lsett = lsett;

    // read is not null?
    if (binRead.ReadBoolean())
    {
        // read all dictionary (key + value)
        dictSubNodes = new ConcurrentDictionary<char, LemmaTreeNode>();
        int iCount = binRead.ReadInt32();
        for (int i = 0; i < iCount; i++)
        {
            char cKey = binRead.ReadChar();
            // Child constructor deserializes the entire child subtree in place.
            var ltrSub = new LemmaTreeNode(binRead, this.lsett, elExamples, this);
            dictSubNodes.TryAdd(cKey, ltrSub);
        }
    }
    else
    {
        dictSubNodes = null;
    }

    this.ltnParentNode = ltnParentNode;

    // read similarity, condition and wholeword?
    iSimilarity = binRead.ReadInt32();
    sCondition = binRead.ReadString();
    bWholeWord = binRead.ReadBoolean();

    // best rule signature
    lrBestRule = elExamples.Rules[binRead.ReadString()];

    // best rules (signature + weight pairs)
    int iCountBest = binRead.ReadInt32();
    aBestRules = new RuleWeighted[iCountBest];
    for (int i = 0; i < iCountBest; i++)
    {
        aBestRules[i] = new RuleWeighted(elExamples.Rules[binRead.ReadString()], binRead.ReadDouble());
    }

    // weight, start, end
    dWeight = binRead.ReadDouble();
    iStart = binRead.ReadInt32();
    iEnd = binRead.ReadInt32();
    this.elExamples = elExamples;
}
// Essential Class Functions (building model & lemmatizing) ----------

/// <summary>
/// Builds the lemmatization decision tree(s) from the collected examples.
/// Does nothing when a model has already been built.
/// </summary>
public void BuildModel()
{
    // A non-null root node means the model is already built.
    if (LtnRootNode != null)
    {
        return;
    }

    if (Lsett.bBuildFrontLemmatizer)
    {
        LtnRootNode = new LemmaTreeNode(Lsett, ElExamples.GetFrontRearExampleList(false));
        LtnRootNodeFront = new LemmaTreeNode(Lsett, ElExamples.GetFrontRearExampleList(true));
    }
    else
    {
        // Examples must be finalized before the tree is constructed.
        // TODO(review): original carried a note suggesting this call be removed.
        ElExamples.FinalizeAdditions();
        LtnRootNode = new LemmaTreeNode(Lsett, ElExamples);
    }
}
/// <summary>
/// Restores this node - and, recursively, its whole subtree - from a Latino
/// binary stream. Mirrors the BinaryReader-based Deserialize overload; the
/// read order must exactly match the corresponding Save.
/// </summary>
/// <param name="binRead">Source stream positioned at this node's serialized data.</param>
/// <param name="lsett">Shared lemmatizer settings.</param>
/// <param name="elExamples">Shared example list; rules are resolved by their string signature against it.</param>
/// <param name="ltnParentNode">Parent node, or null for the root.</param>
public void Load(Latino.BinarySerializer binRead, LemmatizerSettings lsett, ExampleList elExamples, LemmaTreeNode ltnParentNode)
{
    this.lsett = lsett;

    // Leading flag: were child nodes serialized?
    if (binRead.ReadBool())
    {
        dictSubNodes = new Dictionary<char, LemmaTreeNode>();
        int iCount = binRead.ReadInt();
        for (int i = 0; i < iCount; i++)
        {
            char cKey = binRead.ReadChar();
            // Child constructor loads the entire child subtree in place.
            LemmaTreeNode ltrSub = new LemmaTreeNode(binRead, this.lsett, elExamples, this);
            dictSubNodes.Add(cKey, ltrSub);
        }
    }
    else
    {
        dictSubNodes = null;
    }

    this.ltnParentNode = ltnParentNode;

    // Node condition data.
    iSimilarity = binRead.ReadInt();
    sCondition = binRead.ReadString();
    bWholeWord = binRead.ReadBool();

    // Best rule, resolved by its string signature.
    lrBestRule = elExamples.Rules[binRead.ReadString()];

    // Best rules (signature + weight pairs).
    int iCountBest = binRead.ReadInt();
    aBestRules = new RuleWeighted[iCountBest];
    for (int i = 0; i < iCountBest; i++)
    {
        aBestRules[i] = new RuleWeighted(elExamples.Rules[binRead.ReadString()], binRead.ReadDouble());
    }

    // Weight and example range covered by this node.
    dWeight = binRead.ReadDouble();
    iStart = binRead.ReadInt();
    iEnd = binRead.ReadInt();
    this.elExamples = elExamples;
}
/// <summary>
/// Adds a single training example (word, lemma, weight, morphosyntactic tag)
/// to the example repository.
/// </summary>
/// <param name="sWord">Inflected word form.</param>
/// <param name="sLemma">Lemma for the word.</param>
/// <param name="dWeight">Weight of the example.</param>
/// <param name="sMsd">Morphosyntactic descriptor tag.</param>
public void AddExample(string sWord, string sLemma, double dWeight, string sMsd)
{
    ElExamples.AddExample(sWord, sLemma, dWeight, sMsd);
    // Any previously built tree no longer reflects the example set,
    // so drop it and force a rebuild on next use.
    LtnRootNode = null;
}
// Essential Class Functions (adding examples to repository) ----------

/// <summary>
/// Loads training examples from a Multext-format file into the repository.
/// </summary>
/// <param name="srIn">Reader over the Multext file contents.</param>
/// <param name="sFormat">Format string describing the file's columns.</param>
public void AddMultextFile(StreamReader srIn, string sFormat)
{
    ElExamples.AddMultextFile(srIn, sFormat);
    // The existing tree is stale once new examples arrive; force a rebuild.
    LtnRootNode = null;
}
/// <summary>
/// Deserialization constructor: rebuilds the node and its subtree from a
/// Latino binary stream by delegating to <c>Load</c>.
/// </summary>
/// <param name="binRead">Source stream positioned at serialized node data.</param>
/// <param name="lsett">Shared lemmatizer settings.</param>
/// <param name="elExamples">Shared example list used to resolve rules.</param>
/// <param name="ltnParentNode">Parent node, or null for the root.</param>
public LemmaTreeNode(Latino.BinarySerializer binRead, LemmatizerSettings lsett, ExampleList elExamples, LemmaTreeNode ltnParentNode)
{
    this.Load(binRead, lsett, elExamples, ltnParentNode);
}
/// <summary>
/// Deserialization constructor: rebuilds the node and its subtree from a
/// BinaryReader stream by delegating to <c>Deserialize</c>.
/// </summary>
/// <param name="binRead">Reader positioned at serialized node data.</param>
/// <param name="lsett">Shared lemmatizer settings.</param>
/// <param name="elExamples">Shared example list used to resolve rules.</param>
/// <param name="ltnParentNode">Parent node, or null for the root.</param>
public LemmaTreeNode(BinaryReader binRead, LemmatizerSettings lsett, ExampleList elExamples, LemmaTreeNode ltnParentNode)
{
    this.Deserialize(binRead, lsett, elExamples, ltnParentNode);
}