/// <summary>
/// Sets up the token-level similarity delegate and partitions both input
/// strings into token arrays, then hands off to Initialize().
/// Guarantees the left side is never longer than the right by swapping
/// both the token arrays and the backing strings when necessary.
/// </summary>
private void MyInit()
{
    ISimilarity editDistance = new Leven();
    getSimilarity = new Similarity(editDistance.GetSimilarity);

    var tokeniser = new Tokeniser();
    _leftTokens = tokeniser.Partition(_lString);
    _rightTokens = tokeniser.Partition(_rString);

    // Keep the shorter token list on the left; swap tokens and strings together
    // so they stay in sync.
    if (_leftTokens.Length > _rightTokens.Length)
    {
        string[] swappedTokens = _leftTokens;
        _leftTokens = _rightTokens;
        _rightTokens = swappedTokens;

        string swappedText = _lString;
        _lString = _rString;
        _rString = swappedText;
    }

    // NOTE(review): lengths are stored minus one — presumably the matrix code
    // downstream treats these as inclusive upper bounds; confirm before changing.
    leftLen = _leftTokens.Length - 1;
    rightLen = _rightTokens.Length - 1;
    this.Initialize();
}
/// <summary>
/// Initialises the similarity delegate and token arrays for both strings and
/// returns this instance so calls can be chained fluently.
/// The shorter token list always ends up on the left; the token arrays and
/// their source strings are swapped as a pair when needed.
/// </summary>
/// <returns>This <see cref="MatchsMaker"/> instance, fully initialised.</returns>
public MatchsMaker MyInit()
{
    ISimilarity editDistance = new Leven();
    getSimilarity = new Similarity(editDistance.GetSimilarity);

    var tokeniser = new Tokeniser();
    _leftTokens = tokeniser.Partition(_lString);
    _rightTokens = tokeniser.Partition(_rString);

    // Swap sides so the left token array is never the longer one.
    if (_leftTokens.Length > _rightTokens.Length)
    {
        string[] tokenHolder = _leftTokens;
        _leftTokens = _rightTokens;
        _rightTokens = tokenHolder;

        string stringHolder = _lString;
        _lString = _rString;
        _rString = stringHolder;
    }

    // NOTE(review): stored as Length - 1 — looks like inclusive bounds for the
    // matching matrix; verify against Initialize() before altering.
    leftLen = _leftTokens.Length - 1;
    rightLen = _rightTokens.Length - 1;
    Initialize();
    return this;
}
/// <summary>
/// Prepares matching state: wires the edit-distance similarity delegate,
/// tokenises both strings, normalises side order (left never longer than
/// right), records the token upper bounds, and calls Initialize().
/// </summary>
private void MyInit()
{
    ISimilarity distanceMetric = new Leven();
    getSimilarity = new Similarity(distanceMetric.GetSimilarity);

    var splitter = new Tokeniser();
    _leftTokens = splitter.Partition(_lString);
    _rightTokens = splitter.Partition(_rString);

    // If the left side came out longer, exchange both the token arrays and
    // the original strings so they remain consistent with each other.
    if (_leftTokens.Length > _rightTokens.Length)
    {
        string[] tmpTokens = _leftTokens;
        _leftTokens = _rightTokens;
        _rightTokens = tmpTokens;

        string tmpString = _lString;
        _lString = _rString;
        _rString = tmpString;
    }

    // NOTE(review): Length - 1 suggests inclusive indices downstream — confirm.
    leftLen = _leftTokens.Length - 1;
    rightLen = _rightTokens.Length - 1;
    Initialize();
}
/// <summary>
/// Gathers the words and the gloss text (<c>defn</c>) of every sub-sense of
/// the first sense in <paramref name="se"/> into one string and tokenises it.
/// </summary>
/// <param name="se">Search result whose first sense's sub-senses are read.
/// NOTE(review): assumes <c>se.senses</c> has at least one element — confirm
/// against callers.</param>
/// <returns>The token array; tokens of an empty string when the first sense
/// has no sub-senses.</returns>
static string[] GetAllDefinitionTokens(Search se)
{
    // StringBuilder replaces repeated string concatenation (rels += ...),
    // which was O(n^2) over the total text length in the nested loops.
    var rels = new System.Text.StringBuilder();
    if (se.senses[0].senses != null)
    {
        foreach (SynSet ss in se.senses[0].senses)
        {
            foreach (var ww in ss.words)
            {
                rels.Append(' ').Append(ww.word);
            }
            rels.Append(ss.defn);
        }
    }
    return Tokenize.Partition(rels.ToString());
}
/// <summary>
/// Scores the similarity of two strings: tokenises both without stemming,
/// builds a pairwise token-similarity matrix, and reduces it with the
/// heuristic set matcher (mode 1).
/// </summary>
/// <param name="string1">First string to compare.</param>
/// <param name="string2">Second string to compare.</param>
/// <returns>The heuristic similarity score; 0 when either string produces
/// no tokens.</returns>
public float GetScore(string string1, string string2)
{
    var tok = new Tokeniser();
    tok.UseStemming = false;
    _source = tok.Partition(string1);
    _target = tok.Partition(string2);

    // Nothing to compare if either side tokenised to nothing.
    if (_source.Length == 0 || _target.Length == 0)
    {
        return 0F;
    }

    float[][] simMatrix = GetSimilarityMatrix(_source, _target);
    // Fix: removed the unused `HeuristicMatcher match` local —
    // ComputeSetSimilarity is invoked statically, so no instance is needed.
    return HeuristicMatcher.ComputeSetSimilarity(simMatrix, 1);
}
/// <summary>
/// Computes a heuristic similarity score between two strings. Both strings
/// are partitioned into tokens (stemming disabled), a token-by-token
/// similarity matrix is built, and the matrix is collapsed to a single
/// score via <c>HeuristicMatcher.ComputeSetSimilarity</c> (mode 1).
/// </summary>
/// <param name="string1">First string to compare.</param>
/// <param name="string2">Second string to compare.</param>
/// <returns>The computed score; 0 when either string yields no tokens.</returns>
public float GetScore(string string1, string string2)
{
    var tok = new Tokeniser();
    tok.UseStemming = false;
    _source = tok.Partition(string1);
    _target = tok.Partition(string2);

    // An empty token set on either side means there is nothing to match.
    if (_source.Length == 0 || _target.Length == 0)
    {
        return 0F;
    }

    float[][] simMatrix = GetSimilarityMatrix(_source, _target);
    // Fix: dropped the unused `HeuristicMatcher match` local — the call
    // below is static and never used the instance.
    return HeuristicMatcher.ComputeSetSimilarity(simMatrix, 1);
}