/// <summary>
/// Builds a statistical coreference algorithm from serialized model resources.
/// </summary>
/// <param name="props">Pipeline configuration properties.</param>
/// <param name="dictionaries">Coreference dictionaries used by the feature extractor.</param>
/// <param name="wordCountsFile">Path to the serialized word-count statistics.</param>
/// <param name="modelPath">Path to the serialized pairwise classifier model.</param>
/// <param name="maxMentionDistance">Maximum mention distance considered for candidate pairs.</param>
/// <param name="maxMentionDistanceWithStringMatch">Maximum mention distance when a string match is present.</param>
/// <param name="thresholds">Raw threshold values, converted via MakeThresholds.</param>
public StatisticalCorefAlgorithm(Properties props, Dictionaries dictionaries, string wordCountsFile, string modelPath, int maxMentionDistance, int maxMentionDistanceWithStringMatch, double[] thresholds)
{
    extractor = new FeatureExtractor(props, dictionaries, null, wordCountsFile);
    // Assemble the pairwise classifier: default meta-feature extractor, weights loaded from modelPath.
    var metaExtractor = MetaFeatureExtractor.NewBuilder().Build();
    classifier = PairwiseModel.NewBuilder("classifier", metaExtractor)
        .ModelPath(modelPath)
        .Build();
    this.maxMentionDistance = maxMentionDistance;
    this.maxMentionDistanceWithStringMatch = maxMentionDistanceWithStringMatch;
    // Normalize the raw threshold array into the internal threshold table.
    this.thresholds = MakeThresholds(thresholds);
}
/// <summary>
/// Builds a clustering-based coreference algorithm from serialized model resources.
/// </summary>
/// <param name="props">Pipeline configuration properties.</param>
/// <param name="dictionaries">Coreference dictionaries used by the feature extractor.</param>
/// <param name="clusteringPath">Path to the serialized clusterer model.</param>
/// <param name="classificationPath">Path to the serialized pairwise classification model.</param>
/// <param name="rankingPath">Path to the serialized pairwise ranking model.</param>
/// <param name="anaphoricityPath">Path to the serialized anaphoricity model.</param>
/// <param name="wordCountsPath">Path to the serialized word-count statistics.</param>
public ClusteringCorefAlgorithm(Properties props, Dictionaries dictionaries, string clusteringPath, string classificationPath, string rankingPath, string anaphoricityPath, string wordCountsPath)
{
    clusterer = new Clusterer(clusteringPath);
    // The classification and ranking models share the default meta-feature
    // configuration; anaphoricity uses its own dedicated extractor.
    classificationModel = PairwiseModel.NewBuilder("classification", MetaFeatureExtractor.NewBuilder().Build())
        .ModelPath(classificationPath)
        .Build();
    rankingModel = PairwiseModel.NewBuilder("ranking", MetaFeatureExtractor.NewBuilder().Build())
        .ModelPath(rankingPath)
        .Build();
    anaphoricityModel = PairwiseModel.NewBuilder("anaphoricity", MetaFeatureExtractor.AnaphoricityMFE())
        .ModelPath(anaphoricityPath)
        .Build();
    extractor = new FeatureExtractor(props, dictionaries, null, wordCountsPath);
}
/// <summary>
/// Builds a feature-extractor runner and eagerly loads the training dataset
/// from <c>StatisticalCorefTrainer.datasetFile</c>.
/// </summary>
/// <param name="props">Pipeline configuration properties.</param>
/// <param name="dictionaries">Coreference dictionaries used by the feature extractor.</param>
/// <exception cref="InvalidOperationException">
/// Thrown (wrapping the original cause) when the serialized dataset cannot be read.
/// </exception>
public FeatureExtractorRunner(Properties props, Dictionaries dictionaries)
{
    documents = new List<DocumentExamples>();
    compressor = new Compressor<string>();
    extractor = new FeatureExtractor(props, dictionaries, compressor);
    try
    {
        dataset = IOUtils.ReadObjectFromFile(StatisticalCorefTrainer.datasetFile);
    }
    catch (Exception e)
    {
        // Was: throw new Exception(...). InvalidOperationException is more
        // specific yet still derives from Exception, so existing catch blocks
        // keep working; the cause is preserved as the inner exception.
        throw new InvalidOperationException("Error initializing FeatureExtractorRunner", e);
    }
}