/// <summary>
/// Trains a multinomial logistic classifier on the given dataset.
/// Caches the dataset's dimensions and arrays into this factory's fields,
/// augments the feature matrix, then optimizes the weights.
/// </summary>
/// <param name="dataset">Training data; real-valued (RVF) datasets supply their own feature values.</param>
/// <returns>A classifier built from the trained weights and the dataset's indices.</returns>
public virtual MultinomialLogisticClassifier <L, F> TrainClassifier(GeneralDataset <L, F> dataset)
{
    numClasses  = dataset.NumClasses();
    numFeatures = dataset.NumFeatures();
    data        = dataset.GetDataArray();
    // RVF datasets carry explicit per-feature values; otherwise synthesize defaults.
    dataValues = dataset is RVFDataset <object, object>
        ? dataset.GetValuesArray()
        : LogisticUtils.InitializeDataValues(data);
    AugmentFeatureMatrix(data, dataValues);
    labels = dataset.GetLabelsArray();
    return new MultinomialLogisticClassifier <L, F>(TrainWeights(), dataset.featureIndex, dataset.labelIndex);
}
// Example #2
        /// <summary>
        /// Trains a binary logistic classifier on the given dataset.
        /// </summary>
        /// <param name="data">Training dataset; must have exactly two labels.</param>
        /// <param name="l1reg">L1 regularization strength; a value &gt; 0 switches to an OWL-QN minimizer loaded by reflection.</param>
        /// <param name="tol">Convergence tolerance passed to the minimizer.</param>
        /// <param name="prior">Log prior used by the objective function.</param>
        /// <param name="biased">When true, optimizes the biased logistic objective instead of the standard one.</param>
        /// <returns>A trained binary <c>LogisticClassifier</c>.</returns>
        public virtual LogisticClassifier <L, F> TrainClassifier(GeneralDataset <L, F> data, double l1reg, double tol, LogPrior prior, bool biased)
        {
            if (data is RVFDataset)
            {
                ((RVFDataset <L, F>)data).EnsureRealValues();
            }
            if (data.labelIndex.Size() != 2)
            {
                throw new Exception("LogisticClassifier is only for binary classification!");
            }
            IMinimizer <IDiffFunction> minim;

            if (!biased)
            {
                LogisticObjectiveFunction lof = null;
                if (data is Dataset <object, object> )
                {
                    lof = new LogisticObjectiveFunction(data.NumFeatureTypes(), data.GetDataArray(), data.GetLabelsArray(), prior);
                }
                else if (data is RVFDataset <object, object> )
                {
                    // RVF datasets additionally supply real feature values to the objective.
                    lof = new LogisticObjectiveFunction(data.NumFeatureTypes(), data.GetDataArray(), data.GetValuesArray(), data.GetLabelsArray(), prior);
                }
                if (lof == null)
                {
                    // Guard: any other dataset subtype used to fall through with lof == null
                    // and crash later inside the minimizer with an opaque NullReferenceException.
                    throw new InvalidOperationException("Unsupported dataset type for logistic training: " + data.GetType());
                }
                if (l1reg > 0.0)
                {
                    minim = ReflectionLoading.LoadByReflection("edu.stanford.nlp.optimization.OWLQNMinimizer", l1reg);
                }
                else
                {
                    minim = new QNMinimizer(lof);
                }
                weights = minim.Minimize(lof, tol, new double[data.NumFeatureTypes()]);
            }
            else
            {
                BiasedLogisticObjectiveFunction lof = new BiasedLogisticObjectiveFunction(data.NumFeatureTypes(), data.GetDataArray(), data.GetLabelsArray(), prior);
                if (l1reg > 0.0)
                {
                    minim = ReflectionLoading.LoadByReflection("edu.stanford.nlp.optimization.OWLQNMinimizer", l1reg);
                }
                else
                {
                    minim = new QNMinimizer(lof);
                }
                weights = minim.Minimize(lof, tol, new double[data.NumFeatureTypes()]);
            }
            featureIndex = data.featureIndex;
            classes[0]   = data.labelIndex.Get(0);
            classes[1]   = data.labelIndex.Get(1);
            return(new LogisticClassifier <L, F>(weights, featureIndex, classes));
        }
// Example #3
        /// <summary>
        /// Trains a binary logistic classifier where each datum carries an individual weight.
        /// Uses a quadratic log prior and a fixed tolerance of 1e-4.
        /// </summary>
        /// <param name="data">Training dataset; must have exactly two labels.</param>
        /// <param name="dataWeights">Per-datum weights, parallel to the dataset's data array.</param>
        /// <returns>A trained binary <c>LogisticClassifier</c>.</returns>
        public virtual LogisticClassifier <L, F> TrainWeightedData(GeneralDataset <L, F> data, float[] dataWeights)
        {
            if (data is RVFDataset)
            {
                ((RVFDataset <L, F>)data).EnsureRealValues();
            }
            if (data.labelIndex.Size() != 2)
            {
                throw new Exception("LogisticClassifier is only for binary classification!");
            }
            IMinimizer <IDiffFunction> minim;
            LogisticObjectiveFunction  lof = null;

            if (data is Dataset <object, object> )
            {
                lof = new LogisticObjectiveFunction(data.NumFeatureTypes(), data.GetDataArray(), data.GetLabelsArray(), new LogPrior(LogPrior.LogPriorType.Quadratic), dataWeights);
            }
            else if (data is RVFDataset <object, object> )
            {
                // RVF datasets additionally supply real feature values to the objective.
                lof = new LogisticObjectiveFunction(data.NumFeatureTypes(), data.GetDataArray(), data.GetValuesArray(), data.GetLabelsArray(), new LogPrior(LogPrior.LogPriorType.Quadratic), dataWeights);
            }
            if (lof == null)
            {
                // Guard: any other dataset subtype used to fall through with lof == null
                // and crash inside QNMinimizer with an opaque NullReferenceException.
                throw new InvalidOperationException("Unsupported dataset type for logistic training: " + data.GetType());
            }
            minim        = new QNMinimizer(lof);
            weights      = minim.Minimize(lof, 1e-4, new double[data.NumFeatureTypes()]);
            featureIndex = data.featureIndex;
            classes[0]   = data.labelIndex.Get(0);
            classes[1]   = data.labelIndex.Get(1);
            return(new LogisticClassifier <L, F>(weights, featureIndex, classes));
        }
 /// <summary>
 /// Convenience constructor: unpacks the dataset's dimensions, data array, and labels
 /// and delegates to the primary constructor with the given confusion matrix and prior.
 /// </summary>
 /// <param name="dataset">Source of feature/class counts and the data and label arrays.</param>
 /// <param name="confusionMatrix">Bias matrix forwarded unchanged to the primary constructor.</param>
 /// <param name="prior">Log prior forwarded unchanged to the primary constructor.</param>
 public BiasedLogConditionalObjectiveFunction(GeneralDataset <object, object> dataset, double[][] confusionMatrix, LogPrior prior)
     : this(dataset.NumFeatures(), dataset.NumClasses(), dataset.GetDataArray(), dataset.GetLabelsArray(), confusionMatrix, prior)
 {
 }
// Example #5
        /// <summary>
        /// Trains this classifier's weights on per-datum-weighted data using the instance's
        /// <c>prior</c> field and a fixed tolerance of 1e-4. Mutates <c>weights</c>,
        /// <c>featureIndex</c>, and <c>classes</c> in place; returns nothing.
        /// </summary>
        /// <param name="data">Training dataset; must have exactly two labels.</param>
        /// <param name="dataWeights">Per-datum weights, parallel to the dataset's data array.</param>
        public virtual void TrainWeightedData(GeneralDataset <L, F> data, float[] dataWeights)
        {
            //Use LogisticClassifierFactory to train instead.
            if (data.labelIndex.Size() != 2)
            {
                throw new Exception("LogisticClassifier is only for binary classification!");
            }
            IMinimizer <IDiffFunction> minim;
            LogisticObjectiveFunction  lof = null;

            if (data is Dataset <object, object> )
            {
                lof = new LogisticObjectiveFunction(data.NumFeatureTypes(), data.GetDataArray(), data.GetLabelsArray(), prior, dataWeights);
            }
            else if (data is RVFDataset <object, object> )
            {
                // RVF datasets additionally supply real feature values to the objective.
                lof = new LogisticObjectiveFunction(data.NumFeatureTypes(), data.GetDataArray(), data.GetValuesArray(), data.GetLabelsArray(), prior, dataWeights);
            }
            if (lof == null)
            {
                // Guard: any other dataset subtype used to fall through with lof == null
                // and crash inside QNMinimizer with an opaque NullReferenceException.
                throw new InvalidOperationException("Unsupported dataset type for logistic training: " + data.GetType());
            }
            minim        = new QNMinimizer(lof);
            weights      = minim.Minimize(lof, 1e-4, new double[data.NumFeatureTypes()]);
            featureIndex = data.featureIndex;
            classes[0]   = data.labelIndex.Get(0);
            classes[1]   = data.labelIndex.Get(1);
        }
// Example #6
 // amount of add-k smoothing of evidence
 // fudge to keep nonzero
 /// <summary>
 /// Trains weights for the dataset by extracting its data and label arrays
 /// and delegating to the array-based <c>TrainWeights</c> overload.
 /// </summary>
 /// <param name="data">Dataset supplying the data and label arrays.</param>
 /// <returns>The trained weight matrix produced by the overload.</returns>
 protected internal override double[][] TrainWeights(GeneralDataset <L, F> data)
 {
     var dataArray  = data.GetDataArray();
     var labelArray = data.GetLabelsArray();
     return TrainWeights(dataArray, labelArray);
 }