/// <summary>
/// Creates the metric with a caller-supplied tokeniser instead of the default.
/// </summary>
/// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
public CosineSimilarity(ITokeniser tokeniserToUse)
{
    // Store the injected tokeniser and a fresh token-utilities helper.
    this.tokeniser = tokeniserToUse;
    this.tokenUtilities = new TokeniserUtilities<string>();
}
/// <summary>
/// Creates the metric with a caller-supplied tokeniser instead of the default.
/// </summary>
/// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
public BlockDistance(ITokeniser tokeniserToUse)
{
    // Store the injected tokeniser and a fresh token-utilities helper.
    this.tokeniser = tokeniserToUse;
    this.tokenUtilities = new TokeniserUtilities<string>();
}
/// <summary>
/// Creates the metric with a caller-supplied tokeniser instead of the default.
/// </summary>
/// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
public DiceSimilarity(ITokeniser tokeniserToUse)
{
    // Store the injected tokeniser and a fresh token-utilities helper.
    this.tokeniser = tokeniserToUse;
    this.tokenUtilities = new TokeniserUtilities<string>();
}
/// <summary>
/// Creates the metric with a caller-supplied tokeniser instead of the default.
/// </summary>
/// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
public MatchingCoefficient(ITokeniser tokeniserToUse)
{
    // Decompiled single-precision constant (0.0002f widened to double) — kept bit-exact.
    // Presumably used to estimate run time per comparison; confirm against callers.
    estimatedTimingConstant = 0.00019999999494757503;
    tokeniser = tokeniserToUse;
    tokenUtilities = new TokeniserUtilities<string>();
}
/// <summary>
/// Default constructor: installs a no-op stop-word handler and a fresh
/// token-utilities helper.
/// </summary>
public TokeniserWhitespace()
{
    this.stopWordHandler = new DummyStopTermHandler();
    this.tokenUtilities = new TokeniserUtilities<string>();
}
/// <summary>
/// Creates the metric with a caller-supplied tokeniser instead of the default.
/// </summary>
/// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
public DiceSimilarity(ITokeniser tokeniserToUse)
{
    // Decompiled float constant — kept bit-exact; presumably a per-comparison
    // timing estimate (TODO confirm against callers).
    estimatedTimingConstant = 3.4457139008736704E-07;
    tokeniser = tokeniserToUse;
    tokenUtilities = new TokeniserUtilities<string>();
}
/// <summary>
/// Creates the metric with a caller-supplied tokeniser instead of the default.
/// </summary>
/// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
public BlockDistance(ITokeniser tokeniserToUse)
{
    // Decompiled float constant — kept bit-exact; presumably a per-comparison
    // timing estimate (TODO confirm against callers).
    this._estimatedTimingConstant = 6.4457140979357064E-05;
    this._tokeniser = tokeniserToUse;
    this._tokenUtilities = new TokeniserUtilities<string>();
}
/// <summary>
/// Creates the metric with a caller-supplied tokeniser instead of the default.
/// </summary>
/// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
public EuclideanDistance(ITokeniser tokeniserToUse)
{
    // Store the injected tokeniser and a fresh token-utilities helper.
    this.tokeniser = tokeniserToUse;
    this.tokenUtilities = new TokeniserUtilities<string>();
}
/// <summary>
/// Creates the metric with a caller-supplied tokeniser instead of the default.
/// </summary>
/// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
public CosineSimilarity(ITokeniser tokeniserToUse)
{
    // Decompiled float constant — kept bit-exact; presumably a per-comparison
    // timing estimate (TODO confirm against callers).
    estimatedTimingConstant = 3.8337140040312079E-07;
    tokeniser = tokeniserToUse;
    tokenUtilities = new TokeniserUtilities<string>();
}
/// <summary>
/// Creates the metric with a caller-supplied tokeniser instead of the default.
/// </summary>
/// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
public QGramsDistance(ITokeniser tokeniserToUse)
{
    // Store the injected tokeniser and a fresh token-utilities helper.
    this.tokeniser = tokeniserToUse;
    this.tokenUtilities = new TokeniserUtilities<string>();
}
/// <summary>
/// Creates the metric with a caller-supplied tokeniser instead of the default.
/// </summary>
/// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
public OverlapCoefficient(ITokeniser tokeniserToUse)
{
    // Store the injected tokeniser and a fresh token-utilities helper.
    this.tokeniser = tokeniserToUse;
    this.tokenUtilities = new TokeniserUtilities<string>();
}
/// <summary>
/// Default constructor: installs a no-op stop-word handler, a fresh
/// token-utilities helper, and configures a q-gram length of 3 with no
/// character-combination index.
/// </summary>
public TokeniserQGram3()
{
    // Assignment order preserved from the original; the targets are properties
    // whose setters are declared elsewhere, so side-effect order may matter.
    this.StopWordHandler = new DummyStopTermHandler();
    this.TokenUtilities = new TokeniserUtilities<string>();
    this.CharacterCombinationIndex = 0;
    this.QGramLength = 3;
}
/// <summary>
/// Creates the metric with a caller-supplied tokeniser instead of the default.
/// </summary>
/// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
public MatchingCoefficient(ITokeniser tokeniserToUse)
{
    // Store the injected tokeniser and a fresh token-utilities helper.
    this.tokeniser = tokeniserToUse;
    this.tokenUtilities = new TokeniserUtilities<string>();
}
/// <summary>
/// Default constructor: installs a no-op stop-word handler and a fresh
/// token-utilities helper.
/// </summary>
public TokeniserQGram3ExtendedOriginal()
{
    this.stopWordHandler = new DummyStopTermHandler();
    this.tokenUtilities = new TokeniserUtilities<string>();
}
/// <summary>
/// Creates the metric with a caller-supplied tokeniser instead of the default.
/// </summary>
/// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
public JaccardSimilarity(ITokeniser tokeniserToUse)
{
    // Decompiled single-precision constant (0.00014f widened to double) — kept
    // bit-exact; presumably a per-comparison timing estimate (TODO confirm).
    this._estimatedTimingConstant = 0.00014000000373926014;
    this._tokeniser = tokeniserToUse;
    this._tokenUtilities = new TokeniserUtilities<string>();
}
/// <summary>
/// Creates the metric with a caller-supplied tokeniser instead of the default.
/// </summary>
/// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
public MatchingCoefficient(ITokeniser tokeniserToUse)
{
    // Store the injected tokeniser and a fresh token-utilities helper.
    this.tokeniser = tokeniserToUse;
    this.tokenUtilities = new TokeniserUtilities<string>();
}
/// <summary>
/// Creates the metric with a caller-supplied tokeniser instead of the default.
/// </summary>
/// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
public EuclideanDistance(ITokeniser tokeniserToUse)
{
    // Decompiled float constant — kept bit-exact; presumably a per-comparison
    // timing estimate (TODO confirm against callers).
    estimatedTimingConstant = 7.4457137088757008E-05;
    tokeniser = tokeniserToUse;
    tokenUtilities = new TokeniserUtilities<string>();
}
/// <summary>
/// Creates the metric with a caller-supplied tokeniser instead of the default.
/// </summary>
/// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
public OverlapCoefficient(ITokeniser tokeniserToUse)
{
    // Decompiled single-precision constant (0.00014f widened to double) — kept
    // bit-exact; presumably a per-comparison timing estimate (TODO confirm).
    estimatedTimingConstant = 0.00014000000373926014;
    tokeniser = tokeniserToUse;
    tokenUtilities = new TokeniserUtilities<string>();
}
/// <summary>
/// Creates the metric with a caller-supplied tokeniser instead of the default.
/// </summary>
/// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
public QGramsDistance(ITokeniser tokeniserToUse)
{
    // Decompiled float constant — kept bit-exact; presumably a per-comparison
    // timing estimate (TODO confirm against callers).
    estimatedTimingConstant = 0.0001340000017080456;
    tokeniser = tokeniserToUse;
    tokenUtilities = new TokeniserUtilities<string>();
}