Example #1
 /// <summary>Apply a function to every element of every component of this vector, and replace with the result.</summary>
 /// <param name="fn">the function to apply to every element of every component.</param>
 public virtual void MapInPlace(IDoubleUnaryOperator fn)
 {
     for (int i = 0; i < pointers.Length; i++)
     {
         if (pointers[i] == null)
         {
             continue;
         }
         if (copyOnWrite[i])
         {
             copyOnWrite[i] = false;
             pointers[i]    = (double[])pointers[i].Clone();
         }
         if (sparse[i])
         {
             pointers[i][1] = fn.ApplyAsDouble(pointers[i][1]);
         }
         else
         {
             for (int j = 0; j < pointers[i].Length; j++)
             {
                 pointers[i][j] = fn.ApplyAsDouble(pointers[i][j]);
             }
         }
     }
 }
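A minimal usage sketch, assuming only the ApplyAsDouble member exercised above; the SquareOperator class and the vector variable are illustrative names, not part of the original example.
 public class SquareOperator : IDoubleUnaryOperator
 {
     // Replace each value with its square when passed to MapInPlace.
     public double ApplyAsDouble(double x)
     {
         return x * x;
     }
 }

 // Hypothetical call site:
 // someVector.MapInPlace(new SquareOperator());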
Example #2
 public virtual double Minimize(IDoubleUnaryOperator function, double tol, double low, double high)
 {
     this.tol  = tol;
     this.low  = low;
     this.high = high;
     return(Minimize(function));
 }
Example #3
 public virtual void Map(IDoubleUnaryOperator fn)
 {
     for (int i = 0; i < values.Length; i++)
     {
         for (int j = 0; j < values[i].Length; j++)
         {
             values[i][j] = fn.ApplyAsDouble(values[i][j]);
         }
     }
 }
Example #4
        // end class CRFBiasedClassifierOptimizer
        /// <summary>Adjust the bias parameter to optimize some objective function.</summary>
        /// <remarks>
        /// Adjust the bias parameter to optimize some objective function.
        /// Note that this function only tunes the bias parameter of one class
        /// (class of index 0), and is thus only useful for binary classification
        /// problems.
        /// </remarks>
        public virtual void AdjustBias(IList <IList <IN> > develData, IDoubleUnaryOperator evalFunction, double low, double high)
        {
            ILineSearcher ls = new GoldenSectionLineSearch(true, 1e-2, low, high);

            CRFBiasedClassifier.CRFBiasedClassifierOptimizer optimizer = new CRFBiasedClassifier.CRFBiasedClassifierOptimizer(this, this, evalFunction);
            double optVal = ls.Minimize(optimizer);
            int    bi     = featureIndex.IndexOf(Bias);

            log.Info("Class bias of " + weights[bi][0] + " reaches optimal value " + optVal);
        }
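A hedged call-site sketch for the method above; the operator class, its body, and the classifier variable are all illustrative assumptions. Note that the line search constructed here is geometric (first constructor argument true), and the geometric Minimize loop shown later on this page compares high/low, so both bounds should be positive.
        // Illustrative only: the caller supplies an operator that scores a candidate bias;
        // the placeholder body stands in for real dev-set evaluation logic.
        public class DevSetObjective : IDoubleUnaryOperator
        {
            public double ApplyAsDouble(double candidate)
            {
                return 0.0;  // placeholder: return the value the search should minimize
            }
        }

        // Hypothetical call over a positive range, since the search above is geometric:
        // binaryCrf.AdjustBias(develData, new DevSetObjective(), 0.1, 10.0);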
Example #5
        public virtual void TestEasy()
        {
            GoldenSectionLineSearch min = new GoldenSectionLineSearch(false, 0.00001, 0.0, 1.0, false);
            IDoubleUnaryOperator    f2  = null;

            // this function used to fail in Galen's version; min should be 0.2
            // return - x * (2 * x - 1) * (x - 0.8);
            // this function fails if you don't find an initial bracketing
            // return - Math.sin(x * Math.PI);
            // return -(3 + 6 * x - 4 * x * x);
            NUnit.Framework.Assert.AreEqual(0.15, min.Minimize(f2), 1E-4);
        }
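The f2 operator in this test is stripped to null in the listing. Purely as an illustration, this is how the first commented-out candidate above could be wrapped; it is not claimed to be the function the test actually runs.
        // Illustrative reconstruction of the first commented-out candidate,
        // f(x) = -x * (2x - 1) * (x - 0.8), which has a local minimum near x = 0.2 on [0, 1].
        public class CandidateObjective : IDoubleUnaryOperator
        {
            public double ApplyAsDouble(double x)
            {
                return -x * (2 * x - 1) * (x - 0.8);
            }
        }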
Example #6
        /// <summary>
        /// This method will cross validate on the given data and number of folds
        /// to find the optimal C.
        /// </summary>
        /// <remarks>
        /// This method will cross validate on the given data and number of folds
        /// to find the optimal C.  The scorer is how you determine what to
        /// optimize for (F-score, accuracy, etc).  The C is then saved, so that
        /// if you train a classifier after calling this method, that C will be used.
        /// </remarks>
        public virtual void HeldOutSetC(GeneralDataset <L, F> trainSet, GeneralDataset <L, F> devSet, IScorer <L> scorer, ILineSearcher minimizer)
        {
            useAlphaFile = true;
            bool oldUseSigmoid = useSigmoid;

            useSigmoid = false;
            IDoubleUnaryOperator negativeScorer = null;

            C            = minimizer.Minimize(negativeScorer);
            useAlphaFile = false;
            useSigmoid   = oldUseSigmoid;
        }
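The negativeScorer above is likewise stripped to null. A minimal sketch of the shape such an operator typically takes, with every helper name an assumption: it maps a candidate C to a negated held-out score, so that the ILineSearcher's Minimize call ends up maximizing the scorer's metric.
        // Illustrative only: maps a candidate C to a negated held-out score.
        public class NegativeHeldOutScore : IDoubleUnaryOperator
        {
            public double ApplyAsDouble(double c)
            {
                // Hypothetical helper: train on trainSet with this C, then score devSet with the scorer.
                return -TrainAndScore(c);
            }

            private double TrainAndScore(double c)
            {
                return 0.0;  // placeholder for the real train-and-evaluate logic
            }
        }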
Example #7
        public static void Main(string[] args)
        {
            Edu.Stanford.Nlp.Optimization.GoldenSectionLineSearch min = new Edu.Stanford.Nlp.Optimization.GoldenSectionLineSearch(true, 0.00001, 0.001, 121.0);
            IDoubleUnaryOperator f1 = null;

            System.Console.Out.WriteLine(min.Minimize(f1));
            System.Console.Out.WriteLine();
            min = new Edu.Stanford.Nlp.Optimization.GoldenSectionLineSearch(false, 0.00001, 0.0, 1.0);
            IDoubleUnaryOperator f2 = null;

            System.Console.Out.WriteLine(min.Minimize(f2));
        }
Example #8
        private void TuneSigma(int[][] data, int[] labels)
        {
            IDoubleUnaryOperator CVSigmaToPerplexity = null;
            //test if enough training data
            //leave-one-out
            //System.out.println("CV j: "+ j);
            //System.out.println("test i: "+ i + " "+ new BasicDatum(featureIndex.objects(data[i])));
            //System.err.printf("%d: %8g%n", j, score);
            GoldenSectionLineSearch gsls = new GoldenSectionLineSearch(true);

            sigma = gsls.Minimize(CVSigmaToPerplexity, 0.01, 0.0001, 2.0);
            System.Console.Out.WriteLine("Sigma used: " + sigma);
        }
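CVSigmaToPerplexity is also stripped to null; per the surrounding comments it runs a leave-one-out pass over the training data. A hedged sketch of its shape only, with a placeholder body:
        // Illustrative only: maps a candidate sigma to a leave-one-out perplexity over the
        // training data; lower is better, so the golden-section search above minimizes it directly.
        public class SigmaToPerplexity : IDoubleUnaryOperator
        {
            public double ApplyAsDouble(double sigma)
            {
                return 0.0;  // placeholder for the real leave-one-out computation
            }
        }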
Example #9
        public virtual void DiscretizeCompute(IDoubleUnaryOperator function, int numPoints, double low, double high)
        {
            double inc = (high - low) / numPoints;

            memory = Generics.NewHashMap();
            for (int i = 0; i < numPoints; i++)
            {
                double x = low + i * inc;
                double y = function.ApplyAsDouble(x);
                memory[x] = y;
                log.Info("for point " + x + '\t' + y);
            }
            DumpMemory();
        }
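A short usage sketch for the method above: tabulate a simple quadratic at 20 points on [0, 1]. The operator class and the searcher variable are illustrative.
        // Illustrative only: a quadratic with its minimum at x = 0.5.
        public class SimpleQuadratic : IDoubleUnaryOperator
        {
            public double ApplyAsDouble(double x)
            {
                return (x - 0.5) * (x - 0.5);
            }
        }

        // Hypothetical call: log and cache 20 evenly spaced sample points on [0, 1].
        // searcher.DiscretizeCompute(new SimpleQuadratic(), 20, 0.0, 1.0);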
Example #10
        /// <summary>
        /// This method will cross validate on the given data and number of folds
        /// to find the optimal C.
        /// </summary>
        /// <remarks>
        /// This method will cross validate on the given data and number of folds
        /// to find the optimal C.  The scorer is how you determine what to
        /// optimize for (F-score, accuracy, etc).  The C is then saved, so that
        /// if you train a classifier after calling this method, that C will be used.
        /// </remarks>
        public virtual void CrossValidateSetC(GeneralDataset <L, F> dataset, int numFolds, IScorer <L> scorer, ILineSearcher minimizer)
        {
            System.Console.Out.WriteLine("in Cross Validate");
            useAlphaFile = true;
            bool oldUseSigmoid = useSigmoid;

            useSigmoid = false;
            CrossValidator <L, F> crossValidator = new CrossValidator <L, F>(dataset, numFolds);
            IToDoubleFunction <Triple <GeneralDataset <L, F>, GeneralDataset <L, F>, CrossValidator.SavedState> > score = null;
            //train(trainSet,true,true);
            IDoubleUnaryOperator negativeScorer = null;

            C            = minimizer.Minimize(negativeScorer);
            useAlphaFile = false;
            useSigmoid   = oldUseSigmoid;
        }
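A hedged call-site sketch; the factory, dataset, and scorer variables are illustrative. Per the remarks, the C found here is saved, so a later training call on the same factory picks it up.
        // Hypothetical usage: pick C by 10-fold cross-validation, searching a positive range
        // geometrically with the golden-section line search used elsewhere on this page.
        // ILineSearcher cSearch = new GoldenSectionLineSearch(true, 1e-2, 1e-4, 10.0);
        // factory.CrossValidateSetC(dataset, 10, scorer, cSearch);
        // A subsequent training call then uses the tuned C, as the remarks state.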
Example #11
        private double GetRoot(IDoubleUnaryOperator func, double lower, double upper)
        {
            double mid   = 0.5 * (lower + upper);
            double Tol   = 1e-8;
            double skew  = 0.4;
            int    count = 0;

            if (func.ApplyAsDouble(upper) > 0 || func.ApplyAsDouble(lower) < 0)
            {
                Say("LOWER AND UPPER SUPPLIED TO GET ROOT DO NOT BOUND THE ROOT.");
            }
            double fval = func.ApplyAsDouble(mid);

            while (System.Math.Abs(fval) > Tol)
            {
                count += 1;
                if (fval > 0)
                {
                    lower = mid;
                }
                else
                {
                    if (fval < 0)
                    {
                        upper = mid;
                    }
                }
                mid  = skew * lower + (1 - skew) * upper;
                fval = func.ApplyAsDouble(mid);
                if (count > 100)
                {
                    break;
                }
            }
            Say("   " + nf.Format(mid) + "  f" + nf.Format(fval));
            return(mid);
        }
Example #12
        public virtual double Minimize(IDoubleUnaryOperator function)
        {
            double tol  = this.tol;
            double low  = this.low;
            double high = this.high;
            // cdm Oct 2006: The code used to do nothing to find or check
            // the validity of an initial
            // bracketing; it just blindly placed the midpoint at the golden ratio
            // I now try to grid search a little in case the function is very flat
            // (RTE contradictions).
            double flow  = function.ApplyAsDouble(low);
            double fhigh = function.ApplyAsDouble(high);

            if (Verbose)
            {
                log.Info("Finding min between " + low + " (value: " + flow + ") and " + high + " (value: " + fhigh + ')');
            }
            double mid;
            double oldY;
            bool   searchRight;

            if (false)
            {
                // initialize with golden means
                mid  = GoldenMean(low, high);
                oldY = function.ApplyAsDouble(mid);
                if (Verbose)
                {
                    log.Info("Initially probed at " + mid + ", value is " + oldY);
                }
                if (oldY < flow || oldY < fhigh)
                {
                    searchRight = false;
                }
                else
                {
                    // Galen had this true; should be false
                    mid  = GoldenMean(high, low);
                    oldY = function.ApplyAsDouble(mid);
                    if (Verbose)
                    {
                        log.Info("Probed at " + mid + ", value is " + oldY);
                    }
                    searchRight = true;
                    if (!(oldY < flow || oldY < fhigh))
                    {
                        log.Info("Warning: GoldenSectionLineSearch init didn't find slope!!");
                    }
                }
            }
            else
            {
                // grid search a little; this case doesn't do geometric differently...
                if (Verbose)
                {
                    log.Info("20 point gridsearch for good mid point....");
                }
                double bestPoint = low;
                double bestVal   = flow;
                double incr      = (high - low) / 22.0;
                for (mid = low + incr; mid < high; mid += incr)
                {
                    oldY = function.ApplyAsDouble(mid);
                    if (Verbose)
                    {
                        log.Info("Probed at " + mid + ", value is " + oldY);
                    }
                    if (oldY < bestVal)
                    {
                        bestPoint = mid;
                        bestVal   = oldY;
                        if (Verbose)
                        {
                            log.Info(" [best so far!]");
                        }
                    }
                    if (Verbose)
                    {
                        log.Info();
                    }
                }
                mid         = bestPoint;
                oldY        = bestVal;
                searchRight = mid < low + (high - low) / 2.0;
                if (oldY < flow && oldY < fhigh)
                {
                    if (Verbose)
                    {
                        log.Info("Found a good mid point at (" + mid + ", " + oldY + ')');
                    }
                }
                else
                {
                    log.Info("Warning: GoldenSectionLineSearch grid search couldn't find slope!!");
                    // revert to initial positioning and pray
                    mid         = GoldenMean(low, high);
                    oldY        = function.ApplyAsDouble(mid);
                    searchRight = false;
                }
            }
            memory[mid] = oldY;
            while (geometric ? (high / low > 1 + tol) : high - low > tol)
            {
                if (Verbose)
                {
                    log.Info("Current low, mid, high: " + nf.Format(low) + ' ' + nf.Format(mid) + ' ' + nf.Format(high));
                }
                double newX = GoldenMean(searchRight ? high : low, mid);
                double newY = function.ApplyAsDouble(newX);
                memory[newX] = newY;
                if (Verbose)
                {
                    log.Info("Probed " + (searchRight ? "right" : "left") + " at " + newX + ", value is " + newY);
                }
                if (newY < oldY)
                {
                    // keep going in this direction
                    if (searchRight)
                    {
                        low = mid;
                    }
                    else
                    {
                        high = mid;
                    }
                    mid  = newX;
                    oldY = newY;
                }
                else
                {
                    // go the other way
                    if (searchRight)
                    {
                        high = newX;
                    }
                    else
                    {
                        low = newX;
                    }
                    searchRight = !searchRight;
                }
            }
            return(mid);
        }
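GoldenMean is referenced throughout but not defined in this listing; the sketch below is an assumption based on standard golden-section search, where the interior point sits a fraction (3 − √5)/2 ≈ 0.382 of the way from the first argument to the second, with the analogous multiplicative step when the search is geometric.
        // Assumed implementation, not taken from the library source:
        private static readonly double GoldenSection = (3.0 - System.Math.Sqrt(5.0)) / 2.0;  // ~0.382

        private double GoldenMean(double a, double b)
        {
            return geometric
                ? a * System.Math.Pow(b / a, GoldenSection)  // multiplicative step for geometric search
                : a + (b - a) * GoldenSection;               // additive step otherwise
        }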
 internal CRFBiasedClassifierOptimizer(CRFBiasedClassifier <IN> _enclosing, CRFBiasedClassifier <IN> c, IDoubleUnaryOperator e)
 {
     this._enclosing   = _enclosing;
     this.crf          = c;
     this.evalFunction = e;
 }