Example #1
 public mlpcvreport(mlptrain.mlpcvreport obj)
 {
     _innerobj = obj;
 }
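This constructor does not copy anything: it stores a reference to an inner report that some lower-level mlptrain routine has already filled. A minimal sketch of that pattern follows; only mlptrain.mlpcvreport and the constructor shown above are taken from the example, the rest is illustrative.

 // Hedged sketch: wrap an inner report produced by the low-level mlptrain layer.
 // How 'inner' gets populated (e.g. by a cross-validation call) is outside this snippet.
 mlptrain.mlpcvreport inner = new mlptrain.mlpcvreport();
 // ... a lower-level routine fills 'inner' here ...
 mlpcvreport report = new mlpcvreport(inner);   // wraps the same object, no copy is made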
Example #2
        public static bool testmlpe(bool silent)
        {
            bool result = new bool();
            bool waserrors = new bool();
            int passcount = 0;
            int maxn = 0;
            int maxhid = 0;
            int nf = 0;
            int nhid = 0;
            int nl = 0;
            int nhid1 = 0;
            int nhid2 = 0;
            int ec = 0;
            int nkind = 0;
            int algtype = 0;
            int tasktype = 0;
            int pass = 0;
            mlpe.mlpensemble ensemble = new mlpe.mlpensemble();
            mlptrain.mlpreport rep = new mlptrain.mlpreport();
            mlptrain.mlpcvreport oobrep = new mlptrain.mlpcvreport();
            double[,] xy = new double[0,0];
            int i = 0;
            int j = 0;
            int nin = 0;
            int nout = 0;
            int npoints = 0;
            double e = 0;
            int info = 0;
            int nless = 0;
            int nall = 0;
            int nclasses = 0;
            bool inferrors = new bool();
            bool procerrors = new bool();
            bool trnerrors = new bool();

            waserrors = false;
            inferrors = false;
            procerrors = false;
            trnerrors = false;
            passcount = 10;
            maxn = 4;
            maxhid = 4;
            
            //
            // General MLP ensemble tests
            //
            for(nf=1; nf<=maxn; nf++)
            {
                for(nl=1; nl<=maxn; nl++)
                {
                    for(nhid1=0; nhid1<=maxhid; nhid1++)
                    {
                        for(nhid2=0; nhid2<=0; nhid2++)
                        {
                            for(nkind=0; nkind<=3; nkind++)
                            {
                                for(ec=1; ec<=3; ec++)
                                {
                                    
                                    //
                                    // Skip meaningless parameter combinations
                                    //
                                    if( nkind==1 && nl<2 )
                                    {
                                        continue;
                                    }
                                    if( nhid1==0 && nhid2!=0 )
                                    {
                                        continue;
                                    }
                                    
                                    //
                                    // Tests
                                    //
                                    testinformational(nkind, nf, nhid1, nhid2, nl, ec, passcount, ref inferrors);
                                    testprocessing(nkind, nf, nhid1, nhid2, nl, ec, passcount, ref procerrors);
                                }
                            }
                        }
                    }
                }
            }
            
            //
            // Ensemble training must reduce error.
            // Tested on random regression and classification tasks.
            //
            nin = 3;
            nout = 2;
            nhid = 5;
            npoints = 100;
            nless = 0;
            nall = 0;
            for(pass=1; pass<=10; pass++)
            {
                for(algtype=0; algtype<=1; algtype++)
                {
                    for(tasktype=0; tasktype<=1; tasktype++)
                    {
                        if( tasktype==0 )
                        {
                            xy = new double[npoints-1+1, nin+nout-1+1];
                            for(i=0; i<=npoints-1; i++)
                            {
                                for(j=0; j<=nin+nout-1; j++)
                                {
                                    xy[i,j] = 2*math.randomreal()-1;
                                }
                            }
                            mlpe.mlpecreate1(nin, nhid, nout, 1+math.randominteger(3), ensemble);
                        }
                        else
                        {
                            xy = new double[npoints-1+1, nin+1];
                            nclasses = 2+math.randominteger(2);
                            for(i=0; i<=npoints-1; i++)
                            {
                                for(j=0; j<=nin-1; j++)
                                {
                                    xy[i,j] = 2*math.randomreal()-1;
                                }
                                xy[i,nin] = math.randominteger(nclasses);
                            }
                            mlpe.mlpecreatec1(nin, nhid, nclasses, 1+math.randominteger(3), ensemble);
                        }
                        e = mlpe.mlpermserror(ensemble, xy, npoints);
                        if( algtype==0 )
                        {
                            mlpe.mlpebagginglm(ensemble, xy, npoints, 0.001, 1, ref info, rep, oobrep);
                        }
                        else
                        {
                            mlpe.mlpebagginglbfgs(ensemble, xy, npoints, 0.001, 1, 0.01, 0, ref info, rep, oobrep);
                        }
                        if( info<0 )
                        {
                            trnerrors = true;
                        }
                        else
                        {
                            if( (double)(mlpe.mlpermserror(ensemble, xy, npoints))<(double)(e) )
                            {
                                nless = nless+1;
                            }
                        }
                        nall = nall+1;
                    }
                }
            }
            trnerrors = trnerrors || (double)(nall-nless)>(double)(0.3*nall);
            
            //
            // Final report
            //
            waserrors = inferrors || procerrors || trnerrors;
            if( !silent )
            {
                System.Console.Write("MLP ENSEMBLE TEST");
                System.Console.WriteLine();
                System.Console.Write("INFORMATIONAL FUNCTIONS:                 ");
                if( !inferrors )
                {
                    System.Console.Write("OK");
                    System.Console.WriteLine();
                }
                else
                {
                    System.Console.Write("FAILED");
                    System.Console.WriteLine();
                }
                System.Console.Write("BASIC PROCESSING:                        ");
                if( !procerrors )
                {
                    System.Console.Write("OK");
                    System.Console.WriteLine();
                }
                else
                {
                    System.Console.Write("FAILED");
                    System.Console.WriteLine();
                }
                System.Console.Write("TRAINING:                                ");
                if( !trnerrors )
                {
                    System.Console.Write("OK");
                    System.Console.WriteLine();
                }
                else
                {
                    System.Console.Write("FAILED");
                    System.Console.WriteLine();
                }
                if( waserrors )
                {
                    System.Console.Write("TEST SUMMARY: FAILED");
                    System.Console.WriteLine();
                }
                else
                {
                    System.Console.Write("TEST SUMMARY: PASSED");
                    System.Console.WriteLine();
                }
                System.Console.WriteLine();
                System.Console.WriteLine();
            }
            result = !waserrors;
            return result;
        }
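The training section of this test doubles as a recipe for bagged MLP ensembles. The sketch below extracts just that recipe as a standalone method, using only calls that appear in the test above (mlpe.mlpecreate1, mlpe.mlpebagginglm, mlpe.mlpermserror); the helper name and its error handling are hypothetical.

        // Hypothetical helper, not part of the ALGLIB sources: trains a bagged
        // ensemble of 3 networks on a regression dataset xy (npoints rows laid out
        // as nin inputs followed by nout targets) and returns its RMS error on xy.
        public static double trainbaggedensemble(double[,] xy, int npoints, int nin, int nhid, int nout)
        {
            mlpe.mlpensemble ensemble = new mlpe.mlpensemble();
            mlptrain.mlpreport rep = new mlptrain.mlpreport();
            mlptrain.mlpcvreport oobrep = new mlptrain.mlpcvreport();
            int info = 0;

            mlpe.mlpecreate1(nin, nhid, nout, 3, ensemble);                              // ensemble of 3 members
            mlpe.mlpebagginglm(ensemble, xy, npoints, 0.001, 1, ref info, rep, oobrep);  // LM bagging: decay=0.001, 1 restart
            if( info<0 )
            {
                throw new System.Exception("mlpebagginglm failed, info=" + info);
            }
            return mlpe.mlpermserror(ensemble, xy, npoints);                             // error on the training set
        }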
Example #3
 public mlpcvreport()
 {
     _innerobj = new mlptrain.mlpcvreport();
 }
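The default constructor just allocates a fresh inner report, so the wrapper can be created up front and its inner object handed to a lower-level routine such as mlptrain.mlpkfoldcvlm from Example #4. A short sketch follows; reading _innerobj from outside the class is an assumption made purely for illustration (in the real wrapper the field may only be reachable through the generated public API).

 // Hedged sketch: create an empty wrapper, then let cross-validation fill it.
 mlpcvreport cvreport = new mlpcvreport();          // allocates a fresh mlptrain.mlpcvreport internally
 mlptrain.mlpreport rep = new mlptrain.mlpreport();
 int info = 0;
 // With a network template, dataset xy and fold count at hand, the call would be:
 // mlptrain.mlpkfoldcvlm(network, xy, npoints, 0.001, 5, 10, ref info, rep, cvreport._innerobj);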
Example #4
        public static bool testmlptrain(bool silent)
        {
            bool result = new bool();
            bool waserrors = new bool();
            int passcount = 0;
            int maxn = 0;
            int maxhid = 0;
            int info = 0;
            int nf = 0;
            int nl = 0;
            int nhid1 = 0;
            int nhid2 = 0;
            int nkind = 0;
            int i = 0;
            mlpbase.multilayerperceptron network = new mlpbase.multilayerperceptron();
            mlpbase.multilayerperceptron network2 = new mlpbase.multilayerperceptron();
            mlptrain.mlpreport rep = new mlptrain.mlpreport();
            mlptrain.mlpcvreport cvrep = new mlptrain.mlpcvreport();
            int ncount = 0;
            double[,] xy = new double[0,0];
            double[,] valxy = new double[0,0];
            bool inferrors = new bool();
            bool procerrors = new bool();
            bool graderrors = new bool();
            bool hesserrors = new bool();
            bool trnerrors = new bool();

            waserrors = false;
            inferrors = false;
            procerrors = false;
            graderrors = false;
            hesserrors = false;
            trnerrors = false;
            passcount = 10;
            maxn = 4;
            maxhid = 4;
            
            //
            // General multilayer network tests
            //
            for(nf=1; nf<=maxn; nf++)
            {
                for(nl=1; nl<=maxn; nl++)
                {
                    for(nhid1=0; nhid1<=maxhid; nhid1++)
                    {
                        for(nhid2=0; nhid2<=0; nhid2++)
                        {
                            for(nkind=0; nkind<=3; nkind++)
                            {
                                
                                //
                                // Skip meaningless parameter combinations
                                //
                                if( nkind==1 && nl<2 )
                                {
                                    continue;
                                }
                                if( nhid1==0 && nhid2!=0 )
                                {
                                    continue;
                                }
                                
                                //
                                // Tests
                                //
                                testinformational(nkind, nf, nhid1, nhid2, nl, passcount, ref inferrors);
                                testprocessing(nkind, nf, nhid1, nhid2, nl, passcount, ref procerrors);
                                testgradient(nkind, nf, nhid1, nhid2, nl, passcount, ref graderrors);
                                testhessian(nkind, nf, nhid1, nhid2, nl, passcount, ref hesserrors);
                            }
                        }
                    }
                }
            }
            
            //
            // Test network training on simple XOR problem
            //
            xy = new double[3+1, 2+1];
            xy[0,0] = -1;
            xy[0,1] = -1;
            xy[0,2] = -1;
            xy[1,0] = 1;
            xy[1,1] = -1;
            xy[1,2] = 1;
            xy[2,0] = -1;
            xy[2,1] = 1;
            xy[2,2] = 1;
            xy[3,0] = 1;
            xy[3,1] = 1;
            xy[3,2] = -1;
            mlpbase.mlpcreate1(2, 2, 1, network);
            mlptrain.mlptrainlm(network, xy, 4, 0.001, 10, ref info, rep);
            trnerrors = trnerrors || (double)(mlpbase.mlprmserror(network, xy, 4))>(double)(0.1);
            
            //
            // Test CV on random noisy problem
            //
            ncount = 100;
            xy = new double[ncount-1+1, 1+1];
            for(i=0; i<=ncount-1; i++)
            {
                xy[i,0] = 2*math.randomreal()-1;
                xy[i,1] = math.randominteger(4);
            }
            mlpbase.mlpcreatec0(1, 4, network);
            mlptrain.mlpkfoldcvlm(network, xy, ncount, 0.001, 5, 10, ref info, rep, cvrep);
            
            //
            // Final report
            //
            waserrors = inferrors || procerrors || graderrors || hesserrors || trnerrors;
            if( !silent )
            {
                System.Console.Write("MLP TEST");
                System.Console.WriteLine();
                System.Console.Write("INFORMATIONAL FUNCTIONS:                 ");
                if( !inferrors )
                {
                    System.Console.Write("OK");
                    System.Console.WriteLine();
                }
                else
                {
                    System.Console.Write("FAILED");
                    System.Console.WriteLine();
                }
                System.Console.Write("BASIC PROCESSING:                        ");
                if( !procerrors )
                {
                    System.Console.Write("OK");
                    System.Console.WriteLine();
                }
                else
                {
                    System.Console.Write("FAILED");
                    System.Console.WriteLine();
                }
                System.Console.Write("GRADIENT CALCULATION:                    ");
                if( !graderrors )
                {
                    System.Console.Write("OK");
                    System.Console.WriteLine();
                }
                else
                {
                    System.Console.Write("FAILED");
                    System.Console.WriteLine();
                }
                System.Console.Write("HESSIAN CALCULATION:                     ");
                if( !hesserrors )
                {
                    System.Console.Write("OK");
                    System.Console.WriteLine();
                }
                else
                {
                    System.Console.Write("FAILED");
                    System.Console.WriteLine();
                }
                System.Console.Write("TRAINING:                                ");
                if( !trnerrors )
                {
                    System.Console.Write("OK");
                    System.Console.WriteLine();
                }
                else
                {
                    System.Console.Write("FAILED");
                    System.Console.WriteLine();
                }
                if( waserrors )
                {
                    System.Console.Write("TEST SUMMARY: FAILED");
                    System.Console.WriteLine();
                }
                else
                {
                    System.Console.Write("TEST SUMMARY: PASSED");
                    System.Console.WriteLine();
                }
                System.Console.WriteLine();
                System.Console.WriteLine();
            }
            result = !waserrors;
            return result;
        }
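The XOR block in the middle of this test is also the shortest possible Levenberg-Marquardt training example. The sketch below repeats it as a standalone method, using only calls that appear above (mlpbase.mlpcreate1, mlptrain.mlptrainlm, mlpbase.mlprmserror); the method name is hypothetical.

        // Hypothetical helper, not part of the ALGLIB sources: trains a 2-2-1 network
        // on XOR (inputs and target encoded as +1/-1) and returns its RMS error,
        // which the test above expects to be below 0.1 after successful training.
        public static double trainxor()
        {
            mlpbase.multilayerperceptron network = new mlpbase.multilayerperceptron();
            mlptrain.mlpreport rep = new mlptrain.mlpreport();
            int info = 0;
            double[,] xy = new double[4,3];

            xy[0,0] = -1; xy[0,1] = -1; xy[0,2] = -1;   // -1 XOR -1 = -1
            xy[1,0] =  1; xy[1,1] = -1; xy[1,2] =  1;   //  1 XOR -1 =  1
            xy[2,0] = -1; xy[2,1] =  1; xy[2,2] =  1;   // -1 XOR  1 =  1
            xy[3,0] =  1; xy[3,1] =  1; xy[3,2] = -1;   //  1 XOR  1 = -1

            mlpbase.mlpcreate1(2, 2, 1, network);                           // 2 inputs, 2 hidden neurons, 1 output
            mlptrain.mlptrainlm(network, xy, 4, 0.001, 10, ref info, rep);  // decay=0.001, 10 restarts
            return mlpbase.mlprmserror(network, xy, 4);
        }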