Example #1
        /** Return a D x N matrix whose columns are the feature vectors of the inputs. */
        public static Matrix ToInputMatrix(RandomFeatureMap fm, List<IKEPDist[]> inputs)
        {
            // Map each array of incoming messages to a D-dimensional feature
            // vector, then stack the N vectors as columns.
            Vector[] features = inputs.Select(msgs => fm.MapToVector(msgs)).ToArray();
            Matrix   m        = MatrixUtils.StackColumns(features);

            return m;
        }
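
A quick usage sketch (hypothetical `fm` and `inputs`; `Debug.Assert` needs System.Diagnostics): the stacked matrix has one row per random feature and one column per input.

            // Hypothetical usage: fm and inputs are assumed to be an existing
            // RandomFeatureMap and List<IKEPDist[]>, respectively.
            Matrix phi = ToInputMatrix(fm, inputs);
            Debug.Assert(phi.Rows == fm.GetOutputDimension()); // D rows
            Debug.Assert(phi.Cols == inputs.Count);            // N columns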
Example #2
 public override RandomFeatureMap Regenerate(int[] numFeatures)
 {
     // Redraw the random features of every stacked map. Each sub-map
     // receives the same numFeatures array and interprets it itself.
     RandomFeatureMap[] newMaps = new RandomFeatureMap[maps.Length];
     for (int i = 0; i < maps.Length; i++)
     {
         newMaps[i] = maps[i].Regenerate(numFeatures);
     }
     return new StackFeatureMap(newMaps);
 }
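
A hedged call sketch; `stackMap` and the feature counts are illustrative, not from the source:

     // Hypothetical usage: redraw every sub-map's random features.
     RandomFeatureMap fresh = stackMap.Regenerate(new int[] { 200, 400 });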
Example #3
        /**
         * Evaluate the feature map with leave-one-out cross validation.
         * Return a vector of squared loss values, each corresponding to a regularization
         * parameter in regList.
         * This implementation follows cond_fm_finiteout.m
         */
        public static double[] EvaluateFeatureMap(RandomFeatureMap fm,
                                                  double[] regList, List<IKEPDist[]> inputs, List<double> outputs)
        {
            // see http://numerics.mathdotnet.com/Matrix.html for how to use MathNet Matrix

            Matrix   xtemp = ToInputMatrix(fm, inputs);
            MNMatrix x     = MatrixUtils.ToMathNetMatrix(xtemp);
            int      d     = x.RowCount;
            int      n     = x.ColumnCount;
            MNVector y     = MNVector.Build.Dense(outputs.ToArray());
            MNMatrix Inn   = MNMatrix.Build.SparseIdentity(n);

            // Loop body completed from the LOO formula sketched in the
            // commented-out cross validation block of BatchLearn below.
            double[] errs = new double[regList.Length];
            for (int i = 0; i < regList.Length; i++)
            {
                double lambda = regList[i];
                // Ridge solve: X = (x x^T + lambda*I_d)^{-1} x is d x n.
                MNMatrix A = x.TransposeAndMultiply(x) + MNMatrix.Build.SparseDiagonal(d, lambda);
                MNMatrix X = A.Solve(x);
                // Hat-matrix trick: H = I_n - x^T X, and the LOO residual of
                // point j is (H y)_j / H_jj.
                MNMatrix H        = Inn - x.TransposeThisAndMultiply(X);
                MNVector hDiagInv = H.Diagonal();
                hDiagInv.MapInplace(v => 1.0 / v);
                double errSqrt = (H * y).PointwiseMultiply(hDiagInv).L2Norm();
                errs[i] = errSqrt * errSqrt / n;
            }
            return errs;
        }
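
For reference, the standard ridge-regression leave-one-out identity this method relies on (the same formula appears in the commented-out cross validation block of BatchLearn below); with \Phi the D x N feature matrix:

            H = I_N - \Phi^\top \left( \Phi \Phi^\top + \lambda I_D \right)^{-1} \Phi,
            \qquad
            e^{\mathrm{loo}}_j = \frac{(H y)_j}{H_{jj}},
            \qquad
            \mathrm{LOO}(\lambda) = \frac{1}{N} \sum_{j=1}^{N} \left( e^{\mathrm{loo}}_j \right)^2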
Example #4
        public static new BayesLinRegFM FromMatlabStruct(MatlabStruct s)
        {
//			s.className=class(this);
//			s.featureMap=this.featureMap.toStruct();
//			%s.regParam=this.regParam;
//			s.mapMatrix=this.mapMatrix;
//			s.posteriorCov = this.posteriorCov;
//			s.noise_var = this.noise_var;

            string className = s.GetString("className");

            if (!className.Equals(MATLAB_CLASS))
            {
                throw new ArgumentException("The input does not represent a " + MATLAB_CLASS);
            }
            MatlabStruct     fmStruct   = s.GetStruct("featureMap");
            RandomFeatureMap featureMap = RandomFeatureMap.FromMatlabStruct(fmStruct);
            // This is the same as a posterior mean
            Vector mapMatrix = s.Get1DVector("mapMatrix");

            if (mapMatrix.Count != featureMap.GetOutputDimension())
            {
                throw new ArgumentException("mapMatrix and featureMap's dimenions are incompatible.");
            }
            Matrix postCov = s.GetMatrix("posteriorCov");

            if (postCov.Cols != featureMap.GetOutputDimension())
            {
                throw new ArgumentException("posterior covariance and featureMap's dimenions are incompatible.");
            }
            double noise_var = s.GetDouble("noise_var");
            Vector crossCorr = s.Get1DVector("crossCorrelation");
            var    bayes     = new BayesLinRegFM();

            bayes.featureMap    = featureMap;
            bayes.posteriorMean = mapMatrix;
            bayes.posteriorCov  = postCov;
            bayes.noiseVar      = noise_var;
            bayes.crossCorr     = crossCorr;
            // No need to do the initial batch train because we loaded the result
            // from .mat.
            bayes.WillNeedInitialTrain = false;
            return bayes;
        }
Example #5
        /**
         * Initialize an empty Bayesian linear regressor suitable for online
         * learning from scratch.
         */
        public BayesLinRegFM(RandomFeatureMap featureMap)
        {
//			if(noiseVar < 0){
//				throw new ArgumentException("Require noise variance >= 0");
//			}
//			this.noiseVar = noiseVar;
//			if(uThreshold < 0){
//				throw new ArgumentException("Require uncertainty threshold >= 0");
//			}
//			this.uThreshold = uThreshold;
            int D = featureMap.GetOutputDimension();

            this.featureMap = featureMap;
//			this.noiseVar = noiseVar;
//			this.uThreshold = uThreshold;
            this.posteriorMean = Vector.Zero(D);
            // Assume the prior for W is N(0, I): zero mean, identity covariance.
            this.posteriorCov = Matrix.IdentityScaledBy(D, 1.0);
            this.crossCorr    = Vector.Zero(D);
        }
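
A minimal construction sketch (`fm` is a placeholder for any built RandomFeatureMap): the regressor starts at the prior, so its first predictions are maximally uncertain.

            // Hypothetical usage: start online learning from the N(0, I) prior.
            var reg = new BayesLinRegFM(fm);
            // reg's posterior mean is the zero vector; its covariance is I_D.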
Example #6
        public static RandomFeatureMap FromMatlabStruct(MatlabStruct s)
        {
            string           className = s.GetString("className");
            RandomFeatureMap map       = null;

            if (className.Equals(RFGJointKGG.MATLAB_CLASS))
            {
                map = RFGJointKGG.FromMatlabStruct(s);
            }
//			else if(className.Equals("RFGSumEProdMap")){
//
//			}else if(className.Equals("RFGEProdMap")){
//
//			}else if(className.Equals("RFGJointEProdMap")){
//
//			}else if(className.Equals("RFGProductEProdMap")){
//
//			}
            else
            {
                throw new ArgumentException("Unknown className: " + className);
            }
            return map;
        }
Example #7
        private void BatchLearn()
        {
            // Batch learning uses the collected messages. This will reset many
            // properties of the object.

            // TODO: full cross validation later.
            // For now, we will use median heuristic to set the parameter.
//			int[] inOutNumFeatures = { this.MinibatchInnerFeatures, this.MinibatchOuterFeatures };
            int[] inOutNumFeatures = { this.InnerFeatures, this.OuterFeatures };
//			int[] inOutNumFeatures = { 200, 400 };
//			int[] inOutNumFeatures = {400, 700};
//			int[] inOutNumFeatures = {50, 50};
            double[]                medianFactors = { 0.5 };
            Random                  rng           = new Random(1);
            List <IKEPDist[]>       inputs        = this.batchInputs;
            List <RandomFeatureMap> candidates    = featureMap.GenCandidates(
                inputs, inOutNumFeatures, medianFactors, rng);

            /*
             * unfinished cross validation implementation
             * double[] noiseVarCandidates = new double[]{1e-4, 1e-3, 1e-2};
             * var M = MNMatrix.Build;
             * int n = inputs.Count;
             * MNVector Y = MNVector.Build.Dense(batchOutputs.ToArray());
             * double[][] looMSErrs = new double[candidates.Count][];
             * // TODO: Improve this with parallel for-loop ?
             * Console.WriteLine("#### Performing initial batch learning ####");
             * Console.WriteLine();
             *
             * for(int i=0; i<candidates.Count; i++){
             *      RandomFeatureMap fm = candidates[i];
             *      int d = fm.GetOutputDimension();
             *      MNMatrix Inn = M.SparseIdentity(n); // n x n (phi^T X below is n x n)
             *      // D x N matrix
             *      MNMatrix phi = fm.GenFeaturesMNMat(inputs);
             *      MNMatrix ppt = phi.TransposeAndMultiply(phi);
             *      looMSErrs[i] = new double[noiseVarCandidates.Length];
             *      for(int nj=0; nj<noiseVarCandidates.Length; nj++){
             *              double noiseVariance = noiseVarCandidates[nj];
             *              MNMatrix regI = M.SparseDiagonal(d, noiseVariance);
             *              MNMatrix A = ppt + regI;
             *              MNMatrix X = A.Solve(phi);
             *              Debug.Assert(X.RowCount == d);
             *              Debug.Assert(X.ColumnCount == n);
             *              // TODO: H does not need to be formed explicitly.
             *              // This is efficient if n > d. Later.
             *              MNMatrix H = Inn - phi.TransposeThisAndMultiply(X);
             *              MNVector HDiagInv = H.Diagonal();
             *              HDiagInv.MapInplace(delegate(double v){
             *                      return 1.0/v;
             *              });
             *              double errSqrt = H.LeftMultiply(Y).PointwiseMultiply(HDiagInv).L2Norm();
             *              looMSErrs[i][nj] = errSqrt*errSqrt/n;
             *
             *              Console.WriteLine("");
             *      }
             * }
             */
            this.featureMap = candidates[0];
            this.noiseVar   = 1e-4;
            const double priorVariance = 1.0;

//			this.featureMap = fm;
            // threshold on log predict variance
            // Used -8.5 for the logistic regression problem
//			this.uThreshold = -8.5;
            Vector[] features = inputs.Select(msgs => featureMap.MapToVector(msgs)).ToArray();
            Matrix   x        = MatrixUtils.StackColumns(features);
            Vector   y        = Vector.FromList(batchOutputs);

            // Matrix x is D x N.
            int Dout = x.Rows;
            Matrix xxt = x * x.Transpose();

            crossCorr = x * y;

            Matrix postPrec = xxt * (1.0 / noiseVar) + Matrix.IdentityScaledBy(Dout, 1.0 / priorVariance);

            this.posteriorCov  = MatrixUtils.Inverse(postPrec);
            this.posteriorMean = this.posteriorCov * crossCorr * (1.0 / this.noiseVar);
            if (double.IsNaN(uThreshold))
            {
                uThreshold = -8.5;
            }

        }
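
Written out, the conjugate Bayesian linear-regression update computed at the end of BatchLearn, with X the D x N feature matrix, y the stacked outputs, noise variance \sigma^2 = 10^{-4} and prior variance v_0 = 1:

            \Sigma_{\mathrm{post}} = \left( \tfrac{1}{\sigma^2} X X^\top + \tfrac{1}{v_0} I_D \right)^{-1},
            \qquad
            \mu_{\mathrm{post}} = \tfrac{1}{\sigma^2} \, \Sigma_{\mathrm{post}} \, X y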