Example No. 1
        public void optimize()
        {
            curpars = new float[npars];
            lstpars = new float[npars];

            fcalled = 0;
            iter    = 0;
            while (!converged() && iter < maxiter)
            {
                // estimate the gradient at the current point (curpars) by central differences
                var baseval = F(curpars);
                var grad    = new float[npars];
                if (verbose)
                {
                    System.Console.WriteLine("baseval:" + baseval);
                }
                for (int i = 0; i < npars; i++)
                {
                    var pparsP = peturbcurpars(i, stepSize, +1);
                    var pparsM = peturbcurpars(i, stepSize, -1);
                    var valP   = F(pparsP);
                    var valM   = F(pparsM);
                    grad[i] = (valP - valM) / (2 * stepSize);
                    var c = curpars[i];
                    var l = lstpars[i];
                    var g = grad[i];
                    if (verbose)
                    {
                        System.Console.WriteLine("    i:" + i + " l:" + l + " c:" + c + "  valP:" + valP + "  valM:" + valM + " g:" + g);
                    }
                }
                var a       = findBestA(curpars, grad);
                var nxtpars = calcGradPoint(curpars, a, grad);

                lstpars = curpars;
                curpars = nxtpars;
                iter   += 1;
            }
            if (iter >= maxiter)
            {
                status = optStatusE.didnotconverge;
                Console.WriteLine("Did not converge");
            }
            else
            {
                status = optStatusE.converged;
            }
            // optimized value is now in curpars
            unpackIntoF(curpars);
            if (verbose)
            {
                Console.WriteLine("Optimized status" + status + " iter:" + iter + "  fcalled:" + fcalled);
            }
        }
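The helpers this loop leans on (peturbcurpars and calcGradPoint) are not shown in the example. A minimal sketch of what they could look like, assuming they are members of the same class and use the curpars/npars fields seen above:

// Hypothetical: copy curpars and nudge parameter i by dir * step for a central-difference probe.
float[] peturbcurpars(int i, float step, int dir)
{
    var ppars = (float[])curpars.Clone();
    ppars[i] += dir * step;
    return ppars;
}

// Hypothetical: step from pars along the negative gradient, scaled by the step length a.
float[] calcGradPoint(float[] pars, float a, float[] grad)
{
    var nxt = new float[npars];
    for (int ip = 0; ip < npars; ip++)
    {
        nxt[ip] = pars[ip] - a * grad[ip];
    }
    return nxt;
}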
Example No. 2
 public void init()
 {
     if (oaplist.Count == 0)
     {
         throw new Exception("No anchor points to initialize on");
     }
     curpars = new float[npars];
     bstpars = new float[npars];
     lstpars = new float[npars];
     bstval  = 9e30f;
     iter    = -2;
     //var v = F(curpars);
     status = optStatusE.initialized;
     randomSamplePhase();
 }
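init() hands off to randomSamplePhase(), which is not shown. A plausible sketch, assuming it scatters random parameter vectors, scores them with F, and keeps the best as the starting point; the sample count, range, and the seeding of bstval/bstpars are all assumptions:

// Hypothetical: sample random parameter vectors and keep the best as the start.
void randomSamplePhase(int nsamples = 32, float range = 1.0f)
{
    var rnd = new System.Random();
    for (int s = 0; s < nsamples; s++)
    {
        var pars = new float[npars];
        for (int ip = 0; ip < npars; ip++)
        {
            // uniform in [-range, +range]; the range is an assumption
            pars[ip] = (float)(rnd.NextDouble() * 2 - 1) * range;
        }
        var v = F(pars);
        if (v < bstval)
        {
            bstval  = v;
            bstpars = pars;
        }
    }
    curpars = (float[])bstpars.Clone();
}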
Example No. 3
        public oapOptimizer(optTypeSelectorE mode)
        {
            this.mode = mode;
            oaplist   = new LinkedList <optimAnchorPoint>();
            switch (mode)
            {
            case optTypeSelectorE.rotY:
                npars = 1;
                break;

            case optTypeSelectorE.transXYZ:
                npars = 3;
                break;

            case optTypeSelectorE.rotYtransXYZ:
                npars = 4;
                break;

            default:
                throw new Exception("Unsupported optimizer mode: " + mode);
            }
            status = optStatusE.uninitialized;
        }
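The constructor only maps the optimization mode to a parameter count; how the optimizer is driven end to end is implied by the other examples. A hypothetical usage sketch (the AddLast call assumes oaplist is accessible and that optimAnchorPoint has a parameterless constructor, neither of which is shown):

// Hypothetical driver code, not part of the examples above.
var opt = new oapOptimizer(optTypeSelectorE.transXYZ);
opt.oaplist.AddLast(new optimAnchorPoint());   // init() throws if there are no anchor points
opt.init();       // allocate parameter arrays and run the random sampling phase
opt.optimize();   // gradient descent until converged() or maxiter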
Example No. 4
        public void optimize()
        {
            curpars = new float[npars];
            lstpars = new float[npars];

            fcalled = 0;
            iter    = 0;
            if (verbosity == verbosityE.verbose)
            {
                GraphUtil.Log("Starting Optim - Oap num:" + oaplist.Count + " npars:" + npars);
                Debug.Log("Starting Optim - Oap num:" + oaplist.Count + " npars:" + npars);
            }
            while (!converged() && iter < maxiter)
            {
                // estimate the gradient at the current point (curpars) by central differences
                var baseval = F(curpars);
                var grad    = new float[npars];
                if (verbosity == verbosityE.verbose)
                {
                    GraphUtil.Log("baseval:" + baseval + " grad:" + grad);
                    Debug.Log("baseval:" + baseval + " grad:" + grad);
                }
                for (int ip = 0; ip < npars; ip++)
                {
                    var pparsP = peturbcurpars(ip, stepSize, +1);
                    var pparsM = peturbcurpars(ip, stepSize, -1);
                    var valP   = F(pparsP);
                    var valM   = F(pparsM);
                    grad[ip] = (valP - valM) / (2 * stepSize);
                    var c = curpars[ip];
                    var l = lstpars[ip];
                    var g = grad[ip];
                    if (verbosity == verbosityE.verbose)
                    {
                        GraphUtil.Log("    par:" + ip + " l:" + l + " c:" + c + "  valP:" + valP + "  valM:" + valM + " g:" + g);
                        Debug.Log("    par:" + ip + " l:" + l + " c:" + c + "  valP:" + valP + "  valM:" + valM + " g:" + g);
                    }
                }
                var a       = findBestA(curpars, grad);
                var nxtpars = calcGradPoint(curpars, a, grad);

                lstpars = curpars;
                curpars = nxtpars;
                iter   += 1;
            }
            if (iter >= maxiter)
            {
                status = optStatusE.didnotconverge;
                GraphUtil.Log("Did not converge");
                Debug.Log("Did not converge");
            }
            else
            {
                status = optStatusE.converged;
            }
            // the best parameters found so far are tracked in bstpars; unpack those
            //unpackIntoF(curpars);
            unpackIntoF(bstpars);
            if (verbosity == verbosityE.info)
            {
                GraphUtil.Log("Optimized status" + status + "  iter:" + iter + "  fcalled:" + fcalled);
                Debug.Log("Optimized status" + status + "  iter:" + iter + "  fcalled:" + fcalled);
            }
        }
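Two more helpers are referenced but never shown: findBestA() and converged(). Sketches under assumptions, with findBestA() written as a simple backtracking line search and converged() as a parameter-change test; the constants (start step, shrink factor, tolerance, iteration cap) are all guesses:

// Hypothetical: shrink the step length until moving along the negative gradient improves F.
float findBestA(float[] pars, float[] grad)
{
    var f0 = F(pars);
    var a  = 1.0f;
    for (int tries = 0; tries < 20; tries++)
    {
        if (F(calcGradPoint(pars, a, grad)) < f0) return a;
        a *= 0.5f;
    }
    return a;
}

// Hypothetical: converged once no parameter moved by more than tol in the last step.
bool converged(float tol = 1e-5f)
{
    if (iter < 1) return false;   // need at least one completed step to compare against
    for (int ip = 0; ip < npars; ip++)
    {
        if (System.Math.Abs(curpars[ip] - lstpars[ip]) > tol) return false;
    }
    return true;
}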