Example #1
        /// <summary>
        /// Trains an SVM with optimal parameters.
        /// </summary>
        /// <param name="data">The training data, which can be constructed using
        /// TrainData::create or TrainData::loadFromCSV.</param>
        /// <param name="kFold">Cross-validation parameter. The training set is divided into kFold subsets;
        /// one subset is used to test the model while the others form the training set, so the SVM algorithm
        /// is executed kFold times.</param>
        /// <param name="cGrid">grid for C</param>
        /// <param name="gammaGrid">grid for gamma</param>
        /// <param name="pGrid">grid for p</param>
        /// <param name="nuGrid">grid for nu</param>
        /// <param name="coeffGrid">grid for coeff</param>
        /// <param name="degreeGrid">grid for degree</param>
        /// <param name="balanced">If true and the problem is 2-class classification, the method creates
        /// more balanced cross-validation subsets, that is, the class proportions in each subset are kept close
        /// to the proportions in the whole training dataset.</param>
        /// <returns>true if the model has been trained successfully.</returns>
        public bool TrainAuto(TrainData data, int kFold = 10,
                              ParamGrid? cGrid = null,
                              ParamGrid? gammaGrid = null,
                              ParamGrid? pGrid = null,
                              ParamGrid? nuGrid = null,
                              ParamGrid? coeffGrid = null,
                              ParamGrid? degreeGrid = null,
                              bool balanced = false)
        {
            throw new NotImplementedException();

            /*
            var cGridValue = cGrid.GetValueOrDefault(GetDefaultGrid(ParamTypes.C));
            var gammaGridValue = gammaGrid.GetValueOrDefault(GetDefaultGrid(ParamTypes.Gamma));
            var pGridValue = pGrid.GetValueOrDefault(GetDefaultGrid(ParamTypes.P));
            var nuGridValue = nuGrid.GetValueOrDefault(GetDefaultGrid(ParamTypes.Nu));
            var coeffGridValue = coeffGrid.GetValueOrDefault(GetDefaultGrid(ParamTypes.Coef));
            var degreeGridValue = degreeGrid.GetValueOrDefault(GetDefaultGrid(ParamTypes.Degree));
            */
        }
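
A minimal usage sketch of this overload, for context. It is only a sketch: the TrainAuto overload above is a stub that throws NotImplementedException, and TrainData.Create is assumed here to be bound the same way as OpenCV's C++ TrainData::create factory. Passing null for the grids is expected to fall back to GetDefaultGrid for each parameter, as the commented-out block above suggests.

    using OpenCvSharp;
    using OpenCvSharp.ML;

    // Two features per sample, integer class labels.
    using var samples = new Mat(4, 2, MatType.CV_32FC1, new float[] { 0, 0, 0, 1, 1, 0, 1, 1 });
    using var labels  = new Mat(4, 1, MatType.CV_32SC1, new[] { 0, 1, 1, 0 });
    var data = TrainData.Create(samples, SampleTypes.RowSample, labels); // assumed binding

    using var svm = SVM.Create();
    svm.Type = SVM.Types.CSvc;
    svm.KernelType = SVM.KernelTypes.Rbf;

    // null grids are expected to fall back to the default ParamGrid for each parameter;
    // balanced: true asks for class-balanced cross-validation folds.
    bool trained = svm.TrainAuto(data, kFold: 5, balanced: true);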
Example #2
 /// <summary>
 /// Trains the statistical model.
 /// </summary>
 /// <param name="trainData">Training data that can be loaded from a file using TrainData::loadFromCSV
 /// or created with TrainData::create.</param>
 /// <param name="flags">Optional flags, depending on the model. Some models can be updated with
 /// new training samples rather than completely overwritten (such as NormalBayesClassifier or ANN_MLP).</param>
 /// <returns>true if the model has been trained successfully.</returns>
 public virtual bool Train(TrainData trainData, int flags = 0)
 {
     throw new NotImplementedException();
 }
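
A hedged usage sketch of this method. The TrainData-based overload above is a stub, and TrainData.Create is assumed to mirror the C++ TrainData::create factory; flags = 0 requests a plain, from-scratch training pass, while updatable models such as NormalBayesClassifier or ANN_MLP accept model-specific update flags instead.

    using OpenCvSharp;
    using OpenCvSharp.ML;

    // Four 2-D samples with integer class labels.
    using var samples = new Mat(4, 2, MatType.CV_32FC1, new float[] { 1, 1, 1, 2, 5, 5, 5, 6 });
    using var labels  = new Mat(4, 1, MatType.CV_32SC1, new[] { 0, 0, 1, 1 });
    var data = TrainData.Create(samples, SampleTypes.RowSample, labels); // assumed binding

    using var bayes = NormalBayesClassifier.Create();
    bool ok = bayes.Train(data, flags: 0); // 0 = train from scratch, no incremental update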
Example #3
 /// <summary>
 /// Computes the error on the training or test dataset.
 /// </summary>
 /// <param name="data">The training data.</param>
 /// <param name="test">If true, the error is computed over the test subset of the data;
 /// otherwise it is computed over the training subset. Note that if you loaded a completely
 /// different dataset to evaluate an already trained classifier, you will probably want to
 /// skip TrainData::setTrainTestSplitRatio entirely and pass test = false, so that the error
 /// is computed over the whole new set.</param>
 /// <param name="resp">The optional output responses.</param>
 /// <returns>The computed error: for classifiers, the percentage of misclassified samples (0-100);
 /// for regression models, the root-mean-square error.</returns>
 public virtual float CalcError(TrainData data, bool test, OutputArray resp)
 {
     throw new NotImplementedException();
 }
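
A hedged sketch of the workflow described in the test parameter note: train on one dataset, then load a completely different evaluation set as its own TrainData (without setting a train/test split) and pass test = false so the error is computed over the whole new set. As above, the overloads shown are stubs and TrainData.Create is an assumed binding.

    using OpenCvSharp;
    using OpenCvSharp.ML;

    // Train on one dataset...
    using var trainSamples = new Mat(4, 2, MatType.CV_32FC1, new float[] { 1, 1, 1, 2, 5, 5, 5, 6 });
    using var trainLabels  = new Mat(4, 1, MatType.CV_32SC1, new[] { 0, 0, 1, 1 });
    using var bayes = NormalBayesClassifier.Create();
    bayes.Train(TrainData.Create(trainSamples, SampleTypes.RowSample, trainLabels), flags: 0); // assumed binding

    // ...then evaluate on a completely different set. No train/test split is set
    // on evalData, so test = false computes the error over the entire set.
    using var evalSamples = new Mat(2, 2, MatType.CV_32FC1, new float[] { 1, 1, 5, 5 });
    using var evalLabels  = new Mat(2, 1, MatType.CV_32SC1, new[] { 0, 1 });
    var evalData = TrainData.Create(evalSamples, SampleTypes.RowSample, evalLabels);

    using var predictions = new Mat(); // optional per-sample responses
    // For classifiers the returned error is the percentage of misclassified samples;
    // for regression models it is an RMS error.
    float error = bayes.CalcError(evalData, test: false, resp: predictions);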