Example #1
 /// <summary>
 /// constructor
 /// </summary>
 /// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
 public CosineSimilarity(ITokeniser tokeniserToUse) {
     tokeniser = tokeniserToUse;
     tokenUtilities = new TokeniserUtilities<string>();
 }
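For orientation, here is a minimal usage sketch for a constructor like the one in Example #1. It assumes the layout of the common SimMetrics .NET port: an ITokeniser implementation such as TokeniserWhitespace, the SimMetricsApi / SimMetricsMetricUtilities / SimMetricsUtilities namespaces, and a GetSimilarity(string, string) member on the metric class. Treat those names as assumptions rather than a verified API surface.

 // Usage sketch; namespace and member names are assumptions (see note above).
 using System;
 using SimMetricsApi;               // ITokeniser (assumed namespace)
 using SimMetricsMetricUtilities;   // CosineSimilarity (assumed namespace)
 using SimMetricsUtilities;         // TokeniserWhitespace (assumed namespace)

 internal static class CosineSimilarityDemo
 {
     private static void Main()
     {
         // Inject an explicit tokeniser rather than relying on a default one.
         ITokeniser tokeniser = new TokeniserWhitespace();
         var metric = new CosineSimilarity(tokeniser);

         // GetSimilarity is assumed to return a normalised score in [0, 1].
         double score = metric.GetSimilarity("the quick brown fox", "the quick brown dog");
         Console.WriteLine(score);
     }
 }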
Example #2
 /// <summary>
 /// constructor
 /// </summary>
 /// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
 public BlockDistance(ITokeniser tokeniserToUse)
 {
     tokeniser      = tokeniserToUse;
     tokenUtilities = new TokeniserUtilities<string>();
 }
Example #3
 /// <summary>
 /// constructor
 /// </summary>
 /// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
 public DiceSimilarity(ITokeniser tokeniserToUse)
 {
     tokeniser      = tokeniserToUse;
     tokenUtilities = new TokeniserUtilities<string>();
 }
Example #4
 public MatchingCoefficient(ITokeniser tokeniserToUse)
 {
     // constant used by the library's similarity-timing estimate
     this.estimatedTimingConstant = 0.00019999999494757503;
     this.tokeniser      = tokeniserToUse;
     this.tokenUtilities = new TokeniserUtilities<string>();
 }
Example #5
 /// <summary>
 /// default constructor
 /// </summary>
 public TokeniserWhitespace() {
     stopWordHandler = new DummyStopTermHandler();
     tokenUtilities = new TokeniserUtilities<string>();
 }
Example #6
 public DiceSimilarity(ITokeniser tokeniserToUse)
 {
     this.estimatedTimingConstant = 3.4457139008736704E-07;
     this.tokeniser      = tokeniserToUse;
     this.tokenUtilities = new TokeniserUtilities<string>();
 }
Example #7
 public BlockDistance(ITokeniser tokeniserToUse)
 {
     _estimatedTimingConstant = 6.4457140979357064E-05;
     _tokeniser      = tokeniserToUse;
     _tokenUtilities = new TokeniserUtilities<string>();
 }
Example #8
 /// <summary>
 /// constructor
 /// </summary>
 /// <param name="tokeniserToUse">the tokeniser to use, should a different tokeniser be required</param>
 public EuclideanDistance(ITokeniser tokeniserToUse)
 {
     tokeniser      = tokeniserToUse;
     tokenUtilities = new TokeniserUtilities<string>();
 }
Example #9
 public CosineSimilarity(ITokeniser tokeniserToUse)
 {
     this.estimatedTimingConstant = 3.8337140040312079E-07;
     this.tokeniser      = tokeniserToUse;
     this.tokenUtilities = new TokeniserUtilities<string>();
 }
Example #10
 /// <summary>
 /// constructor
 /// </summary>
 /// <param name="tokeniserToUse">the tokeniser to use, should a different tokeniser be required</param>
 public QGramsDistance(ITokeniser tokeniserToUse) {
     tokeniser = tokeniserToUse;
     tokenUtilities = new TokeniserUtilities<string>();
 }
Example #11
 /// <summary>
 /// Constructor
 /// </summary>
 /// <param name="tokeniserToUse">the tokeniser to use should a different tokeniser be required</param>
 public OverlapCoefficient(ITokeniser tokeniserToUse)
 {
     tokeniser      = tokeniserToUse;
     tokenUtilities = new TokeniserUtilities<string>();
 }
Example #12
 /// <summary>
 /// constructor
 /// </summary>
 public TokeniserQGram3() {
     StopWordHandler = new DummyStopTermHandler();
     TokenUtilities = new TokeniserUtilities<string>();
     CharacterCombinationIndex = 0;
     QGramLength = 3;
 }
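The q-gram tokenisers such as TokeniserQGram3 above exist to be handed to the tokeniser-accepting metric constructors, for example QGramsDistance in Example #10. The sketch below shows that pairing; as before, the namespace names and the GetSimilarity member come from the common SimMetrics .NET port and should be read as assumptions.

 // Usage sketch pairing TokeniserQGram3 with QGramsDistance; names are assumptions (see note above).
 using System;
 using SimMetricsMetricUtilities;   // QGramsDistance (assumed namespace)
 using SimMetricsUtilities;         // TokeniserQGram3 (assumed namespace)

 internal static class QGramsDistanceDemo
 {
     private static void Main()
     {
         // TokeniserQGram3 splits each input into overlapping 3-character q-grams,
         // which QGramsDistance then compares.
         var metric = new QGramsDistance(new TokeniserQGram3());

         // GetSimilarity is assumed to return a normalised score in [0, 1].
         double score = metric.GetSimilarity("kitten", "sitting");
         Console.WriteLine(score);
     }
 }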
Example #13
 /// <summary>
 /// constructor
 /// </summary>
 /// <param name="tokeniserToUse">the tokeniser to use, should a different tokeniser be required</param>
 public MatchingCoefficient(ITokeniser tokeniserToUse)
 {
     tokeniser      = tokeniserToUse;
     tokenUtilities = new TokeniserUtilities<string>();
 }
Example #14
 /// <summary>
 /// constructor
 /// </summary>
 public TokeniserQGram3ExtendedOriginal() {
     stopWordHandler = new DummyStopTermHandler();
     tokenUtilities = new TokeniserUtilities<string>();
 }
Example #15
 public JaccardSimilarity(ITokeniser tokeniserToUse)
 {
     _estimatedTimingConstant = 0.00014000000373926014;
     _tokeniser      = tokeniserToUse;
     _tokenUtilities = new TokeniserUtilities<string>();
 }
Example #16
 /// <summary>
 /// constructor
 /// </summary>
 /// <param name="tokeniserToUse">the tokeniser to use, should a different tokeniser be required</param>
 public MatchingCoefficient(ITokeniser tokeniserToUse) {
     tokeniser = tokeniserToUse;
     tokenUtilities = new TokeniserUtilities<string>();
 }
Example #17
 public EuclideanDistance(ITokeniser tokeniserToUse)
 {
     this.estimatedTimingConstant = 7.4457137088757008E-05;
     this.tokeniser      = tokeniserToUse;
     this.tokenUtilities = new TokeniserUtilities<string>();
 }
Example #18
 public OverlapCoefficient(ITokeniser tokeniserToUse)
 {
     this.estimatedTimingConstant = 0.00014000000373926014;
     this.tokeniser      = tokeniserToUse;
     this.tokenUtilities = new TokeniserUtilities<string>();
 }
Example #19
 public QGramsDistance(ITokeniser tokeniserToUse)
 {
     this.estimatedTimingConstant = 0.0001340000017080456;
     this.tokeniser      = tokeniserToUse;
     this.tokenUtilities = new TokeniserUtilities<string>();
 }