/// <summary>
/// Stores the feature count, training data (with optional real values),
/// labels, prior and optional per-datum weights on this objective.
/// </summary>
public LogisticObjectiveFunction(int numFeatures, int[][] data, double[][] values, int[] labels, LogPrior prior, float[] dataweights)
{
    this.numFeatures = numFeatures;
    this.data = data;
    this.dataValues = values;
    this.labels = labels;
    this.prior = prior;
    this.dataweights = dataweights;
}
/// <summary>
/// Stores the problem dimensions, training data, labels, confusion matrix
/// and prior on this objective.
/// </summary>
public BiasedLogConditionalObjectiveFunction(int numFeatures, int numClasses, int[][] data, int[] labels, double[][] confusionMatrix, LogPrior prior)
{
    this.numFeatures = numFeatures;
    this.numClasses = numClasses;
    this.confusionMatrix = confusionMatrix;
    this.data = data;
    this.labels = labels;
    this.prior = prior;
}
/// <summary>
/// Stores the training data, real values, labels, problem dimensions,
/// number of L2 shift parameters and prior on this objective.
/// </summary>
public ShiftParamsLogisticObjectiveFunction(int[][] data, double[][] dataValues, int[][] labels, int numClasses, int numFeatures, int numL2Parameters, LogPrior prior)
{
    this.data = data;
    this.dataValues = dataValues;
    this.labels = labels;
    this.prior = prior;
    this.numClasses = numClasses;
    this.numFeatures = numFeatures;
    this.numL2Parameters = numL2Parameters;
}
/// <summary>
/// Combines a supervised and a biased objective as a convex combination.
/// </summary>
/// <param name="objFunc">The supervised objective term.</param>
/// <param name="biasedObjFunc">The biased objective term.</param>
/// <param name="prior">The prior applied to the combined objective.</param>
/// <param name="convexComboFrac">Mixing fraction; must lie in [0, 1].</param>
/// <exception cref="ArgumentOutOfRangeException">
/// If <paramref name="convexComboFrac"/> is outside [0, 1]. (Previously a bare
/// <see cref="Exception"/>; the new type still derives from it, so existing
/// catch blocks continue to work.)
/// </exception>
public SemiSupervisedLogConditionalObjectiveFunction(AbstractCachingDiffFunction objFunc, AbstractCachingDiffFunction biasedObjFunc, LogPrior prior, double convexComboFrac)
{
    // Validate before touching any fields so a failed construction leaves no
    // partially-initialized state (the original assigned first, then threw).
    if (convexComboFrac < 0 || convexComboFrac > 1.0)
    {
        throw new ArgumentOutOfRangeException(nameof(convexComboFrac), convexComboFrac, "convexComboFrac has to lie between 0 and 1 (both inclusive).");
    }
    this.objFunc = objFunc;
    this.biasedObjFunc = biasedObjFunc;
    this.prior = prior;
    this.convexComboFrac = convexComboFrac;
}
/// <summary>
/// Trains with the given L1 regularization and tolerance, without bias handling.
/// </summary>
public virtual LogisticClassifier<L, F> TrainClassifier(GeneralDataset<L, F> data, double l1reg, double tol, LogPrior prior)
{
    // Delegates to the full overload with biased = false.
    return TrainClassifier(data, l1reg, tol, prior, biased: false);
}
/// <summary>
/// Trains with default optimization settings: no L1 regularization and a
/// convergence tolerance of 1e-4.
/// </summary>
public virtual LogisticClassifier<L, F> TrainClassifier(GeneralDataset<L, F> data, LogPrior prior, bool biased)
{
    const double noL1Reg = 0.0;
    const double defaultTol = 1e-4;
    return TrainClassifier(data, noL1Reg, defaultTol, prior, biased);
}
/// <summary>Replaces the prior used by this object.</summary>
public virtual void SetPrior(LogPrior prior) => this.prior = prior;
/// <summary>
/// Convenience constructor: unpacks the dataset's feature/class counts, data
/// array and labels array, then delegates to the primary constructor.
/// </summary>
public BiasedLogConditionalObjectiveFunction(GeneralDataset<object, object> dataset, double[][] confusionMatrix, LogPrior prior)
    : this(dataset.NumFeatures(), dataset.NumClasses(), dataset.GetDataArray(), dataset.GetLabelsArray(), confusionMatrix, prior)
{
}
/// <summary>
/// Convenience constructor with no per-datum weights (passes null weights
/// to the primary constructor).
/// </summary>
public BiasedLogisticObjectiveFunction(int numFeatures, int[][] data, int[] labels, LogPrior prior)
    : this(numFeatures, data, labels, prior, null)
{
}
/// <summary>
/// Convenience constructor for binary-valued data: passes null for the
/// real-values array to the primary constructor.
/// </summary>
public LogisticObjectiveFunction(int numFeatures, int[][] data, int[] labels, LogPrior prior, float[] dataweights)
    : this(numFeatures, data, null, labels, prior, dataweights)
{
}
/// <summary>
/// Convenience constructor using an equal (0.5) convex-combination fraction
/// between the two objectives.
/// </summary>
public SemiSupervisedLogConditionalObjectiveFunction(AbstractCachingDiffFunction objFunc, AbstractCachingDiffFunction biasedObjFunc, LogPrior prior)
    : this(objFunc, biasedObjFunc, prior, 0.5)
{
}
/// <summary>
/// Evaluates the prior on the flattened weight matrix. The gradient buffer
/// handed to Compute is freshly allocated and its contents are discarded.
/// </summary>
public static double GetValue(double[][] weights, LogPrior prior)
{
    double[] flat = Flatten(weights);
    double[] ignoredGradient = new double[flat.Length];
    return prior.Compute(flat, ignoredGradient);
}
/// <summary>
/// Constructs a classifier holding only a prior and a bias flag.
/// Intended for use from LogisticClassifierFactory.
/// </summary>
public LogisticClassifier(LogPrior prior, bool biased)
{
    this.biased = biased;
    this.prior = prior;
}
/// <summary>
/// Constructs a classifier holding only a prior.
/// Prefer training via LogisticClassifierFactory instead of calling this directly.
/// </summary>
public LogisticClassifier(LogPrior prior) { this.prior = prior; }
/// <summary>
/// Trains a binary logistic classifier on the given dataset.
/// </summary>
/// <param name="data">Binary-labeled dataset; must be a Dataset or RVFDataset.</param>
/// <param name="l1reg">L1 regularization strength; if &gt; 0, an OWL-QN minimizer is loaded by reflection.</param>
/// <param name="tol">Convergence tolerance passed to the minimizer.</param>
/// <param name="prior">Prior applied to the objective.</param>
/// <param name="biased">If true, uses the biased objective function.</param>
/// <exception cref="ArgumentException">
/// If the dataset is not binary, or (unbiased path) is of an unsupported type.
/// Previously a bare Exception / NullReferenceException; ArgumentException derives
/// from Exception, so existing catch blocks continue to work.
/// </exception>
public virtual LogisticClassifier <L, F> TrainClassifier(GeneralDataset <L, F> data, double l1reg, double tol, LogPrior prior, bool biased)
{
    if (data is RVFDataset)
    {
        ((RVFDataset<L, F>)data).EnsureRealValues();
    }
    if (data.labelIndex.Size() != 2)
    {
        throw new ArgumentException("LogisticClassifier is only for binary classification!", nameof(data));
    }
    IMinimizer<IDiffFunction> minim;
    if (!biased)
    {
        LogisticObjectiveFunction lof = null;
        if (data is Dataset<object, object>)
        {
            lof = new LogisticObjectiveFunction(data.NumFeatureTypes(), data.GetDataArray(), data.GetLabelsArray(), prior);
        }
        else if (data is RVFDataset<object, object>)
        {
            lof = new LogisticObjectiveFunction(data.NumFeatureTypes(), data.GetDataArray(), data.GetValuesArray(), data.GetLabelsArray(), prior);
        }
        if (lof == null)
        {
            // The original fell through with lof == null and crashed with a
            // NullReferenceException inside the minimizer; fail with a clear message.
            throw new ArgumentException("Unsupported dataset type for logistic classification: " + data.GetType(), nameof(data));
        }
        if (l1reg > 0.0)
        {
            minim = ReflectionLoading.LoadByReflection("edu.stanford.nlp.optimization.OWLQNMinimizer", l1reg);
        }
        else
        {
            minim = new QNMinimizer(lof);
        }
        weights = minim.Minimize(lof, tol, new double[data.NumFeatureTypes()]);
    }
    else
    {
        BiasedLogisticObjectiveFunction lof = new BiasedLogisticObjectiveFunction(data.NumFeatureTypes(), data.GetDataArray(), data.GetLabelsArray(), prior);
        if (l1reg > 0.0)
        {
            minim = ReflectionLoading.LoadByReflection("edu.stanford.nlp.optimization.OWLQNMinimizer", l1reg);
        }
        else
        {
            minim = new QNMinimizer(lof);
        }
        weights = minim.Minimize(lof, tol, new double[data.NumFeatureTypes()]);
    }
    featureIndex = data.featureIndex;
    classes[0] = data.labelIndex.Get(0);
    classes[1] = data.labelIndex.Get(1);
    return new LogisticClassifier<L, F>(weights, featureIndex, classes);
}
/// <summary>
/// Convenience constructor with no per-datum weights (passes null weights
/// to the primary constructor).
/// </summary>
public LogisticObjectiveFunction(int numFeatures, int[][] data, double[][] values, int[] labels, LogPrior prior)
    : this(numFeatures, data, values, labels, prior, null)
{
}
/// <summary>
/// Initializes the base objective with the dataset and prior, then stores the
/// given 2-D weight matrix flattened to 1-D via To1D.
/// </summary>
public AdaptedGaussianPriorObjectiveFunction(GeneralDataset<L, F> dataset, LogPrior prior, double[][] weights)
    : base(dataset, prior)
{
    this.weights = To1D(weights);
}
/// <summary>
/// Stores the prior and regularization strength for the factory.
/// NOTE: the current implementation only supports quadratic priors (or no prior).
/// </summary>
public ShiftParamsLogisticClassifierFactory(LogPrior prior, double lambda)
{
    this.lambda = lambda;
    this.prior = prior;
}