// Example 1 (Ejemplo n.º 1)
        static void test()
        {
            // Load the held-out evaluation data from the feature/gold files.
            Global.swLog.WriteLine("reading test data...");
            dataSet XX = new dataSet(Global.fFeatureTest, Global.fGoldTest);

            Console.WriteLine("test data size: {0}", XX.Count);
            Global.swLog.WriteLine("Done! test data size: {0}", XX.Count);

            // Build the toolbox in evaluation mode (false => load the model
            // & feature files from disk rather than training).
            toolbox tb = new toolbox(XX, false);

            // Evaluate once on the test set (iteration index 0); the per-metric
            // score list is not consumed here.
            tb.test(XX, 0);
        }
// Example 2 (Ejemplo n.º 2)
        static void crossValidation()
        {
            // Load the n-fold train/validation splits.
            Console.WriteLine("reading cross validation data...");
            Global.swLog.WriteLine("reading cross validation data...");
            List <dataSet> trainFolds = new List <dataSet>();
            List <dataSet> validFolds = new List <dataSet>();
            loadDataForCV(trainFolds, validFolds);

            // Run the full CV sweep once per regularizer value r (sigma).
            foreach (double r in Global.regList)
            {
                Global.swLog.WriteLine("\ncross validation. r={0}", r);
                Console.WriteLine("\ncross validation. r={0}", r);
                if (Global.rawResWrite)
                {
                    Global.swResRaw.WriteLine("% cross validation. r={0}", r);
                }

                for (int fold = 0; fold < Global.nCV; fold++)
                {
                    Global.swLog.WriteLine("\n#validation={0}", fold + 1);
                    Console.WriteLine("\n#validation={0}", fold + 1);
                    if (Global.rawResWrite)
                    {
                        Global.swResRaw.WriteLine("% #validation={0}", fold + 1);
                    }

                    Global.reg = r;
                    dataSet foldTrain = validFolds != null ? trainFolds[fold] : null;

                    // "rich" run mode uses the rich-edge toolbox; both paths
                    // train on this fold and evaluate on its held-out split.
                    if (Global.runMode.Contains("rich"))
                    {
                        basicTrain(validFolds[fold], new toolboxRich(foldTrain));
                    }
                    else
                    {
                        basicTrain(validFolds[fold], new toolbox(foldTrain));
                    }

                    resSummarize.write();
                    if (Global.rawResWrite)
                    {
                        Global.swResRaw.WriteLine();
                    }
                }

                if (Global.rawResWrite)
                {
                    Global.swResRaw.WriteLine();
                }
            }
        }
// Example 3 (Ejemplo n.º 3)
        public optimStochastic(toolbox tb)
        {
            // Cache the toolbox collaborators this optimizer works with.
            _model = tb.Model;
            _X     = tb.X;
            _inf   = tb.Inf;
            _fGene = tb.FGene;
            _grad  = tb.Grad;

            // One decay entry per model weight, all seeded with the
            // initial learning rate.
            int weightCount = _model.W.Length;
            Global.decayList = new List <double>(new double[weightCount]);
            listTool.listSet(ref Global.decayList, Global.rate0);
        }
// Example 4 (Ejemplo n.º 4)
        static double train()
        {
            // Load the training & test corpora.
            Console.WriteLine("\nreading training & test data...");
            Global.swLog.WriteLine("\nreading training & test data...");
            dataSet X, XX;

            if (Global.runMode.Contains("tune"))
            {
                // "tune" mode: train() may be driven by tune(), so carve the
                // training corpus itself into a train/validation split.
                dataSet origX = new dataSet(Global.fFeatureTrain, Global.fGoldTrain);
                X  = new dataSet();
                XX = new dataSet();
                dataSplit(origX, Global.tuneSplit, X, XX);
            }
            else
            {
                X  = new dataSet(Global.fFeatureTrain, Global.fGoldTrain);
                XX = new dataSet(Global.fFeatureTest, Global.fGoldTest);
                dataSizeScale(X);
            }

            Console.WriteLine("done! train/test data sizes: {0}/{1}", X.Count, XX.Count);
            Global.swLog.WriteLine("done! train/test data sizes: {0}/{1}", X.Count, XX.Count);

            // Train once per regularizer value r (sigma); the score from the
            // last r is what gets returned.
            double score = 0;

            foreach (double r in Global.regList)
            {
                Global.reg = r;
                Global.swLog.WriteLine("\nr: " + r.ToString());
                Console.WriteLine("\nr: " + r.ToString());
                if (Global.rawResWrite)
                {
                    Global.swResRaw.WriteLine("\n%r: " + r.ToString());
                }

                toolbox tb = new toolbox(X, true);
                score = basicTrain(XX, tb);
                resSummarize.write(); // summarize & emit the collected results

                if (Global.save == 1)
                {
                    tb.Model.save(Global.fModel); // persist the model as a .txt file
                }
            }

            return(score);
        }
// Example 5 (Ejemplo n.º 5)
        //this function can be called by train(), cv(), & richEdge.train()
        //this function can be called by train(), cv(), & richEdge.train()
        public static double basicTrain(dataSet XTest, toolbox tb)
        {
            // Runs one complete training session on tb's data, evaluating on
            // XTest, and returns the final evaluation score (head of the last
            // score list; reported under Global.metric as a percentage).
            Global.reinitGlobal();
            double score = 0;

            if (Global.modelOptimizer.EndsWith("bfgs"))
            {
                // (L-)BFGS runs its own iteration loop inside tb.train();
                // tb/XTest are exposed through Global so the optimizer can
                // evaluate during training. Pick up the last recorded score.
                Global.tb = tb;
                Global.XX = XTest;

                tb.train();
                score = Global.scoreListList[Global.scoreListList.Count - 1][0];
            }
            else
            {
                // Other optimizers: drive Global.ttlIter explicit passes here,
                // timing each pass and logging diff/err/score per iteration.
                for (int i = 0; i < Global.ttlIter; i++)
                {
                    Global.glbIter++;
                    Stopwatch timer = new Stopwatch();
                    timer.Start();

                    double err = tb.train();

                    timer.Stop();
                    double time = timer.ElapsedMilliseconds / 1000.0;

                    // Record per-iteration diagnostics (consumed by resSummarize).
                    Global.timeList.Add(time);
                    Global.errList.Add(err);
                    Global.diffList.Add(Global.diff);

                    // Evaluate after this pass; scoreList[0] is the headline metric.
                    List <double> scoreList = tb.test(XTest, i);
                    score = scoreList[0];
                    Global.scoreListList.Add(scoreList);

                    Global.swLog.WriteLine("iter{0}  diff={1}  train-time(sec)={2}  {3}={4}%", Global.glbIter, Global.diff.ToString("e2"), time.ToString("f2"), Global.metric, score.ToString("f2"));
                    Global.swLog.WriteLine("------------------------------------------------");
                    Global.swLog.Flush();
                    Console.WriteLine("iter{0}  diff={1}  train-time(sec)={2}  {3}={4}%", Global.glbIter, Global.diff.ToString("e2"), time.ToString("f2"), Global.metric, score.ToString("f2"));

                    // Convergence-based early stopping was deliberately disabled:
                    //if (Global.diff < Global.convergeTol)
                    //break;
                }
            }
            return(score);
        }
// Example 6 (Ejemplo n.º 6)
        // L-BFGS optimizer over the toolbox's model weights.
        //   init     - initial weight vector (widened from float to double)
        //   m        - history size (number of stored correction pairs)
        //   l1weight - L1 regularization weight
        //   maxIter  - iteration cap
        // Throws ArgumentOutOfRangeException when m <= 0.
        public optimLBFGS(toolbox tb, float[] init, int m, double l1weight, double maxIter)
        {
            // BUGFIX: validate m BEFORE it is used. The original allocated
            // "new double[m]" first, so a negative m surfaced as a misleading
            // OverflowException instead of the intended error message.
            // ArgumentOutOfRangeException derives from Exception, so existing
            // catch sites remain valid.
            if (m <= 0)
            {
                throw new ArgumentOutOfRangeException(nameof(m), "m must be an integer greater than zero.");
            }

            _model = tb.Model;
            _X     = tb.X;
            _inf   = tb.Inf;
            _fGene = tb.FGene;
            _grad  = tb.Grad;

            // Widen the float initializer to double precision.
            double[] wInit = new double[init.Length];
            for (int i = 0; i < init.Length; i++)
            {
                wInit[i] = (double)init[i];
            }

            double[] tmpAry = new double[wInit.Length];
            _w               = new List <double>(wInit);
            _gradList        = new List <double>(tmpAry);
            _newW            = new List <double>(wInit);
            _newGradList     = new List <double>(tmpAry);
            _dir             = new List <double>(tmpAry);
            _steepestDescDir = new List <double>(_newGradList);
            _alphas   = new List <double>(new double[m]);
            _iter     = 0;
            _memo     = m;
            _dim      = wInit.Length;
            _l1weight = l1weight;
            _maxIter  = maxIter;

            // Evaluate the L1-regularized objective at the starting point and
            // seed the current gradient from the freshly computed one.
            _value = evalL1();
            listTool.listSet(ref _gradList, _newGradList);
        }
// Example 7 (Ejemplo n.º 7)
 // Wires the inference component to the toolbox's shared collaborators.
 public inference(toolbox tb)
 {
     _fGene = tb.FGene;
     _grad  = tb.Grad;
     _optim = tb.Optim;
 }
// Example 8 (Ejemplo n.º 8)
 // Wires the gradient component to the toolbox's shared collaborators.
 public gradient(toolbox tb)
 {
     _fGene = tb.FGene;
     _inf   = tb.Inf;
     _optim = tb.Optim;
 }
// Example 9 (Ejemplo n.º 9)
 // Rich-edge gradient: all behavior comes from the base gradient class;
 // this constructor only forwards the toolbox.
 public gradRich(toolbox tb)
     : base(tb)
 {
 }
// Example 10 (Ejemplo n.º 10)
 // Rich-edge inference: all behavior comes from the base inference class;
 // this constructor only forwards the toolbox.
 public inferRich(toolbox tb)
     : base(tb)
 {
 }