예제 #1
0
파일: SparseGP.cs 프로젝트: 0xCM/arrows
        /// <summary>
        /// Max difference between two sparse GPs - used for
        /// convergence testing.
        /// </summary>
        /// <param name="thatd">That sparse GP which will be compared to this sparse GP</param>
        /// <returns>The maximum difference, or positive infinity when the two
        /// distributions are structurally incompatible.</returns>
        public double MaxDiff(object thatd)
        {
            SparseGP other = thatd as SparseGP;

            // Prior mean and kernel references should be the same;
            // low rank lists are ignored.  Any structural mismatch means
            // the distributions are infinitely far apart.
            bool incompatible =
                other == null ||
                this.FixedParameters != other.FixedParameters ||
                this.IncludePrior != other.IncludePrior ||
                this.IsPointMass != other.IsPointMass ||
                this.IsUniform() != other.IsUniform();
            if (incompatible)
            {
                return double.PositiveInfinity;
            }
            // Past the guard both agree on uniformity, so checking one suffices.
            if (this.IsUniform())
            {
                return 0.0;
            }
            if (this.IsPointMass)
            {
                // Both are point masses here; compare the points directly.
                Diffable diffablePoint = this.Point as Diffable;
                if (diffablePoint != null)
                {
                    return diffablePoint.MaxDiff(other.Point);
                }
                return (this.Point == other.Point) ? 0.0 : double.PositiveInfinity;
            }
            return this.InducingDist.MaxDiff(other.InducingDist);
        }
예제 #2
0
파일: SparseGP.cs 프로젝트: 0xCM/arrows
        /// <summary>
        /// Creates a new SparseGP which is the product of two other SparseGPs
        /// </summary>
        /// <param name="a">First SparseGP</param>
        /// <param name="b">Second SparseGP</param>
        /// <returns>The product distribution</returns>
        public static SparseGP operator *(SparseGP a, SparseGP b)
        {
            // Build a fresh distribution over the same fixed parameters,
            // then fill it in with the product.
            SparseGP product = new SparseGP(a.FixedParameters);
            product.SetToProduct(a, b);
            return product;
        }
예제 #3
0
파일: SparseGP.cs 프로젝트: 0xCM/arrows
        /// <summary>
        /// Creates a new SparseGP which is the ratio of two other SparseGPs
        /// </summary>
        /// <param name="numerator">numerator SparseGP</param>
        /// <param name="denominator">denominator SparseGP</param>
        /// <returns>The ratio distribution</returns>
        public static SparseGP operator /(SparseGP numerator, SparseGP denominator)
        {
            // Build a fresh distribution over the same fixed parameters,
            // then fill it in with the ratio.
            SparseGP quotient = new SparseGP(numerator.FixedParameters);
            quotient.SetToRatio(numerator, denominator);
            return quotient;
        }
예제 #4
0
파일: SparseGP.cs 프로젝트: 0xCM/arrows
        /// <summary>
        /// Creates a sparse GP point mass - i.e. all the mass is at a given function
        /// </summary>
        /// <param name="sgpf">The fixed parameters for the new sparse GP</param>
        /// <param name="value">The function at which all the mass is placed</param>
        /// <returns>The new point-mass sparse GP</returns>
        public static SparseGP PointMass(SparseGPFixed sgpf, IFunction value)
        {
            SparseGP pointMass = new SparseGP(sgpf, false);
            pointMass.Point = value;
            return pointMass;
        }
예제 #5
0
파일: SparseGP.cs 프로젝트: 0xCM/arrows
 /// <summary>
 /// Sets this instance to the product of two sparse GPs.
 /// </summary>
 /// <param name="a">Sparse GP (first factor)</param>
 /// <param name="b">Sparse GP (second factor)</param>
 /// <exception cref="ArgumentException">If the factors have different
 /// FixedParameters, or both include the prior.</exception>
 /// <exception cref="AllZeroException">If both factors are point masses at
 /// different points, so their product is zero everywhere.</exception>
 public void SetToProduct(SparseGP a, SparseGP b)
 {
     // Multiplying GPs defined over different fixed parameters is not meaningful.
     if (a.FixedParameters != b.FixedParameters)
     {
         throw new ArgumentException(
             "SparseGPs do not have the same FixedParameters.  a.FixedParameters = " + a.FixedParameters +
             ", b.FixedParameters = " + b.FixedParameters);
     }
     FixedParameters = a.FixedParameters;
     if (a.IncludePrior && b.IncludePrior)
     {
         throw new ArgumentException("Both SparseGPs include the prior.  Cannot multiply.");
     }
     IncludePrior = a.IncludePrior || b.IncludePrior;
     if (a.IsPointMass || b.IsPointMass)
     {
         // A product involving a point mass is that point mass; two distinct
         // point masses multiply to zero everywhere.
         if (a.IsPointMass && b.IsPointMass && !a.Point.Equals(b.Point))
         {
             throw new AllZeroException();
         }
         Point = a.IsPointMass ? a.Point : b.Point;
     }
     else
     {
         InducingDist.SetToProduct(a.InducingDist, b.InducingDist);
         pointFunc = null;
         ClearCachedValues();
     }
 }
예제 #6
0
파일: SparseGP.cs 프로젝트: 0xCM/arrows
        /// <summary>
        /// Sets this SparseGP distribution to the weighted sum of two other such distributions
        /// </summary>
        /// <param name="weight1">Weight of the first distribution</param>
        /// <param name="value1">First distribution</param>
        /// <param name="weight2">Weight of the second distribution</param>
        /// <param name="value2">Second distribution</param>
        /// <exception cref="ArgumentException">If the two distributions have different
        /// FixedParameters, or disagree about including the prior.</exception>
        public void SetToSum(double weight1, SparseGP value1, double weight2, SparseGP value2)
        {
            if (value1.FixedParameters != value2.FixedParameters)
            {
                throw new ArgumentException("SparseGPs do not have the same FixedParameters.  a.FixedParameters = " + value1.FixedParameters + ", b.FixedParameters = " +
                                            value2.FixedParameters);
            }
            FixedParameters = value1.FixedParameters;
            if (value1.IncludePrior != value2.IncludePrior)
            {
                throw new ArgumentException("One Sparse GP includes a prior, the other does not.  Cannot add.");
            }
            IncludePrior = value1.IncludePrior;

            // Delegate the mixture computation to the inducing-point distribution.
            InducingDist.SetToSum(weight1, value1.InducingDist, weight2, value2.InducingDist);
            // The only time the result is a point mass is if both sources are the same point mass
            if (InducingDist.IsPointMass)
            {
                pointFunc = value1.pointFunc;
            }
            else
            {
                pointFunc = null;
            }
            ClearCachedValues();
        }
예제 #7
0
파일: SparseGP.cs 프로젝트: 0xCM/arrows
 /// <summary>
 /// k(x): the prior variance at Xi, computed lazily and cached in kxx.
 /// </summary>
 /// <param name="sgpb">Sparse GP supplying the fixed parameters</param>
 /// <returns>The (cached) variance value</returns>
 public double K_x_x(SparseGP sgpb)
 {
     // NaN marks the cache as empty.
     if (double.IsNaN(kxx))
     {
         kxx = sgpb.FixedParameters.Prior.Variance(Xi);
     }
     return kxx;
 }
예제 #8
0
파일: SparseGP.cs 프로젝트: 0xCM/arrows
 /// <summary>
 /// K(B,x). This is a calculated Vector maintained
 /// by the class.
 /// </summary>
 /// <param name="sgpb">Sparse GP supplying the fixed parameters</param>
 /// <returns>The cached kernel vector; stays null while Xi is null</returns>
 public Vector K_B_x(SparseGP sgpb)
 {
     // Compute on first use only, and only once Xi is available.
     if (kBx == null && Xi != null)
     {
         kBx = sgpb.FixedParameters.KernelOf_X_B(Xi);
     }
     return kBx;
 }
예제 #9
0
파일: SparseGP.cs 프로젝트: 0xCM/arrows
 /// <summary>
 /// p = Inv(K(B,B)) * K(B,x). This is a calculated Vector maintained
 /// by the class.
 /// </summary>
 /// <param name="sgpb">Sparse GP supplying the fixed parameters</param>
 /// <returns>The cached vector; stays null while K(B,x) is unavailable</returns>
 public Vector P(SparseGP sgpb)
 {
     // Compute on first use only; depends on the (also lazily computed) K(B,x).
     if (pvec == null)
     {
         Vector kernelOfBx = K_B_x(sgpb);
         if (kernelOfBx != null)
         {
             pvec = sgpb.FixedParameters.InvKernelOf_B_B * kernelOfBx;
         }
     }
     return pvec;
 }
예제 #10
0
파일: SparseGP.cs 프로젝트: 0xCM/arrows
 /// <summary>
 /// Sets this instance to the ratio of two sparse GPs.
 /// </summary>
 /// <param name="numerator">Sparse GP</param>
 /// <param name="denominator">Sparse GP</param>
 /// <param name="forceProper">Forwarded to the inducing distribution's ratio computation</param>
 /// <exception cref="ArgumentException">If the operands have different FixedParameters,
 /// or only the denominator includes the prior.</exception>
 /// <exception cref="DivideByZeroException">If dividing by a point mass that the
 /// numerator does not match.</exception>
 public void SetToRatio(SparseGP numerator, SparseGP denominator, bool forceProper = false)
 {
     if (numerator.FixedParameters != denominator.FixedParameters)
     {
         throw new ArgumentException(
             "SparseGPs do not have the same FixedParameters.  numerator.FixedParameters = " + numerator.FixedParameters +
             ", denominator.FixedParameters = " + denominator.FixedParameters);
     }
     FixedParameters = numerator.FixedParameters;
     // The prior divides out: the result includes it only when the numerator
     // alone carries it; a prior in just the denominator cannot be divided away.
     if (numerator.IncludePrior)
     {
         IncludePrior = !denominator.IncludePrior;
     }
     else if (denominator.IncludePrior)
     {
         throw new ArgumentException("Only the denominator includes the prior.  Cannot divide.");
     }
     else
     {
         IncludePrior = false;  // neither includes the prior
     }
     if (denominator.IsPointMass)
     {
         // Division by a point mass is only defined when the numerator is the
         // same point mass, in which case the ratio is uniform.
         if (numerator.IsPointMass && numerator.Point.Equals(denominator.Point))
         {
             SetToUniform();
         }
         else
         {
             throw new DivideByZeroException();
         }
     }
     else if (numerator.IsPointMass)
     {
         Point = numerator.Point;
     }
     else
     {
         // Neither operand is a point mass.
         InducingDist.SetToRatio(numerator.InducingDist, denominator.InducingDist, forceProper);
         pointFunc = null;
         ClearCachedValues();
     }
 }
예제 #11
0
파일: SparseGP.cs 프로젝트: 0xCM/arrows
        /// <summary>
        /// Sets one sparse GP to another. Everything is copied
        /// except the FixedParameters and the list of rank 1 potentials,
        /// which are referenced.
        /// </summary>
        /// <param name="that">The sparse GP to copy</param>
        public void SetTo(SparseGP that)
        {
            fixedParameters = that.FixedParameters;
            InducingDist.SetTo(that.InducingDist);
            IncludePrior = that.IncludePrior;
            pointFunc = that.pointFunc;

            // The derived vectors and matrices are deep-copied (when present)
            // so that later changes to either object cannot affect the other.
            alpha = (that.alpha == null) ? null : Vector.Copy(that.alpha);
            beta = (that.beta == null) ? null : new PositiveDefiniteMatrix(that.beta as Matrix);
            meanB = (that.meanB == null) ? null : Vector.Copy(that.meanB);
            varBB = (that.varBB == null) ? null : new PositiveDefiniteMatrix(that.varBB as Matrix);
        }
예제 #12
0
파일: SparseGP.cs 프로젝트: 0xCM/arrows
 /// <summary>
 /// Get the integral of this distribution times another distribution raised to a power.
 /// </summary>
 /// <param name="that">The distribution to raise to a power</param>
 /// <param name="power">The exponent</param>
 /// <returns>Never returns; always throws.</returns>
 /// <remarks>Not yet implemented: the code after the throw is a partial sketch
 /// (the general, non-point-mass case is missing) and is deliberately left
 /// unreachable.  Warning CS0162 is suppressed elsewhere in this file for
 /// exactly this pattern.</remarks>
 public double GetLogAverageOfPower(SparseGP that, double power)
 {
     throw new NotImplementedException();
     if (IsPointMass)
     {
         return(power * that.GetLogProb(Point));
     }
     else if (that.IsPointMass)
     {
         if (power < 0)
         {
             throw new DivideByZeroException("The exponent is negative and the distribution is a point mass");
         }
         return(this.GetLogProb(that.Point));
     }
     else
     {
         // General case: intentionally empty — not yet implemented.
     }
 }
예제 #13
0
파일: SparseGP.cs 프로젝트: 0xCM/arrows
#pragma warning disable 162

        /// <summary>
        /// Gets the log of the integral of the product of this SparseGP and that SparseGP
        /// </summary>
        /// <param name="that">The other sparse GP; must share this instance's FixedParameters</param>
        /// <returns>The log-average value</returns>
        /// <exception cref="ArgumentException">If the FixedParameters differ, or both
        /// distributions include the prior.</exception>
        /// <exception cref="NotImplementedException">If neither distribution includes
        /// the prior (case not handled below).</exception>
        public double GetLogAverageOf(SparseGP that)
        {
            if (this.FixedParameters != that.FixedParameters)
            {
                throw new ArgumentException("SparseGPs do not have the same FixedParameters.  this.FixedParameters = " + this.FixedParameters + ", that.FixedParameters = " +
                                            that.FixedParameters);
            }
            if (this.IncludePrior && that.IncludePrior)
            {
                throw new ArgumentException("Both SparseGPs include the prior");
            }
            // Point-mass cases reduce to evaluating the other distribution at the point.
            if (that.IsPointMass)
            {
                return(GetLogProb(that.Point));
            }
            if (this.IsPointMass)
            {
                return(that.GetLogProb(this.Point));
            }
            if (this.IncludePrior && !that.IncludePrior)
            {
                // gBB is the distribution of the function on the basis
                VectorGaussian gBB;
                // The 'else' arm is an intentionally unreachable, equivalent-but-slower
                // alternative kept for reference (CS0162 is suppressed for this file).
                if (true)
                {
                    gBB = new VectorGaussian(InducingDist.Dimension);
                    gBB.Precision.SetToSum(FixedParameters.InvKernelOf_B_B, InducingDist.Precision);
                    gBB.MeanTimesPrecision.SetTo(InducingDist.MeanTimesPrecision); // since prior has zero mean
                }
                else
                {
                    // equivalent but slower
                    gBB = VectorGaussian.FromMeanAndVariance(Mean_B, Var_B_B);
                }
                return(gBB.GetLogAverageOf(that.InducingDist));
            }
            if (!this.IncludePrior && that.IncludePrior)
            {
                // Symmetric case: delegate with the roles swapped.
                return(that.GetLogAverageOf(this));
            }
            throw new NotImplementedException();
        }
예제 #14
0
파일: SparseGP.cs 프로젝트: 0xCM/arrows
 /// <summary>
 /// Sets this sparse GP to the power of another sparse GP
 /// </summary>
 /// <param name="dist">The distribution to raise to a power</param>
 /// <param name="exponent">The exponent</param>
 /// <exception cref="DivideByZeroException">If dist is a point mass and the
 /// exponent is negative.</exception>
 /// <exception cref="ArgumentException">If dist includes the prior.</exception>
 public void SetToPower(SparseGP dist, double exponent)
 {
     // Raising to the first power is a plain copy.
     if (exponent == 1.0)
     {
         SetTo(dist);
         return;
     }
     FixedParameters = dist.FixedParameters;
     if (exponent == 0.0)
     {
         // Anything raised to the zeroth power is uniform.
         SetToUniform();
     }
     else if (dist.IsPointMass)
     {
         if (exponent < 0)
         {
             throw new DivideByZeroException("The exponent is negative and the distribution is a point mass");
         }
         // A point mass raised to a positive power is the same point mass.
         Point = dist.Point;
     }
     else if (dist.IncludePrior)
     {
         throw new ArgumentException("Cannot raise prior to a power.");
     }
     else
     {
         IncludePrior = dist.IncludePrior;
         InducingDist.SetToPower(dist.InducingDist, exponent);
         pointFunc = null;
         ClearCachedValues();
     }
 }
예제 #15
0
파일: SparseGP.cs 프로젝트: 0xCM/arrows
 /// <summary>
 /// Copy constructor.
 /// </summary>
 /// <param name="that">The sparse GP to copy</param>
 public SparseGP(SparseGP that)
 {
     // Allocate the inducing distribution at the source's basis dimension,
     // then copy all remaining state across.
     InducingDist = new VectorGaussian(that.FixedParameters.NumberBasisPoints);
     SetTo(that);
 }
예제 #16
0
파일: SparseGP.cs 프로젝트: 0xCM/arrows
#pragma warning restore 162

        /// <summary>
        /// The expected logarithm of that distribution under this distribution
        /// </summary>
        /// <param name="that">The distribution whose logarithm would be averaged</param>
        /// <returns>Never returns; always throws.</returns>
        /// <remarks>Not yet implemented</remarks>
        public double GetAverageLog(SparseGP that)
        {
            throw new NotImplementedException();
        }