public static void NoiseTest(double noiseVariance)
{
    int d = 2;
    int n = 1000;

    // generate data
    var N0I = new VectorGaussian(Vector.Zero(d), PositiveDefiniteMatrix.Identity(d));
    var wTrue = N0I.Sample();
    Normalize(wTrue);
    Vector[] x = new Vector[n];
    bool[] y = new bool[n];
    for (int i = 0; i < n; i++)
    {
        x[i] = N0I.Sample();
        y[i] = (x[i].Inner(wTrue) + Gaussian.Sample(0, 1.0 / noiseVariance)) > 0.0;
    }

    // evaluate models
    var fixedNoise = new BPM_FixedNoise(d, n);
    var noiseRange = new double[] { 1, 2, 10, 20, 30, 100, 1000, 1e4 };
    foreach (double noiseTrain in noiseRange)
    {
        Vector wTrain = fixedNoise.Train(x, y, noiseTrain);
        Normalize(wTrain);
        double err = System.Math.Acos(wTrain.Inner(wTrue)) / System.Math.PI;
        //double err = Math.Sqrt(wTrue.Inner(wTrue) - 2 * wTrue.Inner(wTrain) + wTrain.Inner(wTrain));
        Console.WriteLine("noiseTrain = {0}, error = {1}", noiseTrain, err);
    }
}
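// NoiseTest assumes a Normalize helper that rescales a vector to unit length, so
// that wTrain.Inner(wTrue) is a cosine and Acos(...)/pi is an angular error in
// [0, 1]. A minimal sketch of that helper, assuming in-place normalization (the
// actual implementation is not shown in this section):
private static void Normalize(Vector v)
{
    double norm = System.Math.Sqrt(v.Inner(v));
    if (norm > 0)
    {
        for (int i = 0; i < v.Count; i++)
        {
            v[i] /= norm; // scale in place to unit Euclidean norm
        }
    }
}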
public DistributionArray<Bernoulli> TestBPM_LabelNoise(double initCount, out double noiseEstimate)
{
    noiseEstimate = double.NaN;

    // data
    int K = xtrain[0].Count;

    // Create target y
    VariableArray<bool> y = Variable.Observed(ytrain).Named("y");
    Variable<Vector> w = Variable.Random(new VectorGaussian(Vector.Zero(K),
        PositiveDefiniteMatrix.Identity(K))).Named("w");
    var ProbTrue = Variable.Beta(1, initCount).Named("TPR");
    var ProbFalse = Variable.Beta(initCount, 1).Named("FNR");
    var mean = Variable.GaussianFromMeanAndPrecision(0, .1).Named("mean");
    BayesPointMachine_LabelNoise(xtrain, w, y, ProbTrue, ProbFalse, mean);

    InferenceEngine engine = new InferenceEngine();
    VectorGaussian wPosterior = engine.Infer<VectorGaussian>(w);
    Beta probTruePost = engine.Infer<Beta>(ProbTrue);
    Beta probFalsePost = engine.Infer<Beta>(ProbFalse);
    //Console.WriteLine("Dist over w=\n" + wPosterior);
    //Console.WriteLine("Dist over p_t=\n" + probTruePost);
    //Console.WriteLine("Dist over p_f=\n" + probFalsePost);
    var meanPost = engine.Infer<Gaussian>(mean);

    VariableArray<bool> yt = Variable.Array<bool>(new Range(ytest.Length)).Named("ytest");
    BayesPointMachine_LabelNoise(xtest, Variable.Random(wPosterior).Named("w"), yt,
        Variable.Random(probTruePost), Variable.Random(probFalsePost), Variable.Random(meanPost));
    return engine.Infer<DistributionArray<Bernoulli>>(yt);
}
/// <summary>
/// For the multinomial regression model: generate synthetic data,
/// infer the model parameters and calculate the RMSE between the true
/// and mean inferred coefficients.
/// </summary>
public void Run()
{
    // This example requires VMP
    InferenceEngine engine = new InferenceEngine();
    if (!(engine.Algorithm is Algorithms.VariationalMessagePassing))
    {
        Console.WriteLine("This example only runs with Variational Message Passing");
        return;
    }

    int numSamples = 1000;
    int numFeatures = 6;
    int numClasses = 4;
    int countPerSample = 10;
    var features = new Vector[numSamples];
    var counts = new int[numSamples][];
    var coefficients = new Vector[numClasses];
    var mean = Vector.Zero(numClasses);
    Rand.Restart(1);
    for (int i = 0; i < numClasses - 1; i++)
    {
        mean[i] = Rand.Normal();
        coefficients[i] = Vector.Zero(numFeatures);
        Rand.Normal(Vector.Zero(numFeatures), PositiveDefiniteMatrix.Identity(numFeatures), coefficients[i]);
    }

    mean[numClasses - 1] = 0;
    coefficients[numClasses - 1] = Vector.Zero(numFeatures);
    for (int i = 0; i < numSamples; i++)
    {
        features[i] = Vector.Zero(numFeatures);
        Rand.Normal(Vector.Zero(numFeatures), PositiveDefiniteMatrix.Identity(numFeatures), features[i]);
        var temp = Vector.FromArray(coefficients.Select(o => o.Inner(features[i])).ToArray());
        var p = MMath.Softmax(temp + mean);
        counts[i] = Rand.Multinomial(countPerSample, p);
    }

    Rand.Restart(DateTime.Now.Millisecond);
    VectorGaussian[] bPost;
    Gaussian[] meanPost;
    MultinomialRegressionModel(features, counts, out bPost, out meanPost);
    var bMeans = bPost.Select(o => o.GetMean()).ToArray();
    var bVars = bPost.Select(o => o.GetVariance()).ToArray();
    double error = 0;
    Console.WriteLine("Coefficients -------------- ");
    for (int i = 0; i < numClasses; i++)
    {
        error += (bMeans[i] - coefficients[i]).Sum(o => o * o);
        Console.WriteLine("Class {0} True {1}", i, coefficients[i]);
        Console.WriteLine("Class {0} Inferred {1}", i, bMeans[i]);
    }

    Console.WriteLine("Mean True " + mean);
    Console.WriteLine("Mean Inferred " + Vector.FromArray(meanPost.Select(o => o.GetMean()).ToArray()));
    error = System.Math.Sqrt(error / (numClasses * numFeatures));
    Console.WriteLine("RMSE is {0}", error);
}
public static PositiveDefiniteMatrix DiagonalPDMatrix(double[] v)
{
    var mat = PositiveDefiniteMatrix.Identity(v.Length);
    mat.SetDiagonal(Vector.FromArray(v));
    return mat;
}
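// A short usage sketch for DiagonalPDMatrix (illustrative values). Note that the
// method does not validate its input: every entry of v must be strictly positive
// for the result to actually be positive definite.
public static void DiagonalPDMatrixExample()
{
    var cov = DiagonalPDMatrix(new double[] { 1.0, 0.5, 2.0 });
    var prior = new VectorGaussian(Vector.Zero(3), cov);
    Console.WriteLine(prior);
}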
public DistributionArray<Bernoulli> TestNaiveBayes(double a, out double noiseEstimate)
{
    noiseEstimate = double.NaN;
    int K = xtrain[0].Count;

    // Create target y
    VariableArray<bool> y = Variable.Observed(ytrain).Named("y");
    Variable<Vector> meanTrue = Variable.Random(new VectorGaussian(Vector.Zero(K),
        PositiveDefiniteMatrix.Identity(K))).Named("m1");
    Variable<Vector> meanFalse = Variable.Random(new VectorGaussian(Vector.Zero(K),
        PositiveDefiniteMatrix.Identity(K))).Named("m2");
    var precTrue = Variable.Random(new Wishart(a, PositiveDefiniteMatrix.Identity(K)));
    var precFalse = Variable.Random(new Wishart(a, PositiveDefiniteMatrix.Identity(K)));
    NaiveBayes(xtrain, meanTrue, meanFalse, precTrue, precFalse, y);

    //InferenceEngine.DefaultEngine.Compiler.UseSerialSchedules = true;
    InferenceEngine engine = new InferenceEngine(new VariationalMessagePassing());
    var meanTruePost = engine.Infer<VectorGaussian>(meanTrue);
    var meanFalsePost = engine.Infer<VectorGaussian>(meanFalse);
    var precTruePost = engine.Infer<Wishart>(precTrue);
    var precFalsePost = engine.Infer<Wishart>(precFalse);

    var testRange = new Range(ytest.Length);
    VariableArray<bool> yt = Variable.Array<bool>(testRange).Named("ytest");
    yt[testRange] = Variable.Bernoulli(0.5).ForEach(testRange);
    NaiveBayes(xtest, Variable.Random(meanTruePost), Variable.Random(meanFalsePost),
        Variable.Random<PositiveDefiniteMatrix>(precTruePost),
        Variable.Random<PositiveDefiniteMatrix>(precFalsePost), yt);
    return engine.Infer<DistributionArray<Bernoulli>>(yt);
}
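// TestNaiveBayes calls a NaiveBayes model function that is not shown in this
// section. A plausible sketch, assuming each feature vector is drawn from one of
// two class-conditional vector Gaussians gated on the label (an assumption, not
// the actual implementation):
private void NaiveBayes(Vector[] xData,
    Variable<Vector> meanTrue, Variable<Vector> meanFalse,
    Variable<PositiveDefiniteMatrix> precTrue, Variable<PositiveDefiniteMatrix> precFalse,
    VariableArray<bool> y)
{
    Range j = y.Range;
    VariableArray<Vector> x = Variable.Array<Vector>(j);
    using (Variable.ForEach(j))
    {
        using (Variable.If(y[j]))
        {
            x[j] = Variable.VectorGaussianFromMeanAndPrecision(meanTrue, precTrue);
        }
        using (Variable.IfNot(y[j]))
        {
            x[j] = Variable.VectorGaussianFromMeanAndPrecision(meanFalse, precFalse);
        }
    }
    x.ObservedValue = xData;
}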
public void Run()
{
    // data
    double[] incomes = { 63, 16, 28, 55, 22, 20 };
    double[] ages = { 38, 23, 40, 27, 18, 40 };
    bool[] willBuy = { true, false, true, true, false, false };

    // Create target y
    VariableArray<bool> y = Variable.Observed(willBuy).Named("y");
    Variable<Vector> w = Variable.Random(new VectorGaussian(Vector.Zero(3),
        PositiveDefiniteMatrix.Identity(3))).Named("w");
    BayesPointMachine(incomes, ages, w, y);

    InferenceEngine engine = new InferenceEngine();
    if (!(engine.Algorithm is GibbsSampling))
    {
        VectorGaussian wPosterior = engine.Infer<VectorGaussian>(w);
        Console.WriteLine("Dist over w=\n" + wPosterior);

        double[] incomesTest = { 58, 18, 22 };
        double[] agesTest = { 36, 24, 37 };
        VariableArray<bool> ytest = Variable.Array<bool>(new Range(agesTest.Length)).Named("ytest");
        BayesPointMachine(incomesTest, agesTest, Variable.Random(wPosterior).Named("w"), ytest);
        Console.WriteLine("output=\n" + engine.Infer(ytest));
    }
    else
    {
        Console.WriteLine("This model has a non-conjugate factor, and therefore cannot use Gibbs sampling");
    }
}
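// Run calls a BayesPointMachine model function that is not shown in this section.
// A sketch consistent with the 3-dimensional weight prior above: each example is
// packed into a feature vector (income, age, 1), and the label is a noisy sign of
// its inner product with the weights. The noise value 0.1 is an assumption based
// on the other BPM variants in this section:
public void BayesPointMachine(double[] incomes, double[] ages, Variable<Vector> w, VariableArray<bool> y)
{
    // Create the x vectors, augmented by a constant 1 for the bias term
    Range j = y.Range;
    Vector[] xdata = new Vector[incomes.Length];
    for (int i = 0; i < xdata.Length; i++)
    {
        xdata[i] = Vector.FromArray(incomes[i], ages[i], 1);
    }
    VariableArray<Vector> x = Variable.Observed(xdata, j);

    // Bayes Point Machine: noisy linear threshold likelihood
    double noise = 0.1;
    y[j] = Variable.GaussianFromMeanAndVariance(Variable.InnerProduct(w, x[j]), noise) > 0;
}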
public static void TestDeterminant()
{
    Matrix m1 = Matrix.Parse("6 0 \n 0 1");
    Matrix m2 = Matrix.Parse("3.0 2.0 \n 1.5 3.0");
    Matrix m3 = Matrix.Parse("0.7493 0.5074\n -0.004 -0.4204");
    Matrix m4 = PositiveDefiniteMatrix.Identity(4);
    Console.WriteLine(m1);

    // expect 6
    Console.WriteLine("m1 det: {0}", m1.Determinant());
    // expect 6
    Console.WriteLine("m2 det: {0}", m2.Determinant());
    // expect -0.313
    Console.WriteLine("m3 det: {0}", m3.Determinant());
    Console.WriteLine("m4 det: {0}", m4.Determinant());

    // expect 6
    Console.WriteLine("m1 my det: {0}", MatrixUtils.Determinant(m1));
    // expect 6
    Console.WriteLine("m2 my det: {0}", MatrixUtils.Determinant(m2));
    // expect -0.313
    Console.WriteLine("m3 my det: {0}", MatrixUtils.Determinant(m3));
    Console.WriteLine("m4 my det: {0}", MatrixUtils.Determinant(m4));

    // Use MathNet
    MNMatrix m5 = MNMatrix.Build.Dense(2, 2, new double[] { 6, 0, 0, 1 });
    Console.WriteLine(m5);
    Console.WriteLine("m5 det: {0}", m5.Determinant());
}
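// MatrixUtils.Determinant is exercised above but not defined in this section. A
// minimal recursive cofactor-expansion sketch of what such a helper might look
// like (an assumption, not the actual implementation; adequate for small test
// matrices, but O(n!) and numerically inferior to an LU-based determinant):
public static class MatrixUtils
{
    public static double Determinant(Matrix m)
    {
        int n = m.Rows;
        if (n == 1)
        {
            return m[0, 0];
        }

        double det = 0;
        for (int j = 0; j < n; j++)
        {
            // Minor of m with row 0 and column j removed
            Matrix minor = new Matrix(n - 1, n - 1);
            for (int r = 1; r < n; r++)
            {
                for (int c = 0, mc = 0; c < n; c++)
                {
                    if (c != j)
                    {
                        minor[r - 1, mc++] = m[r, c];
                    }
                }
            }

            det += (j % 2 == 0 ? 1 : -1) * m[0, j] * Determinant(minor);
        }

        return det;
    }
}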
public DistributionArray<Bernoulli> TestBPM_NoiseVariancePlusLabel(double noiseRate, out double noiseEstimate)
{
    int K = xtrain[0].Count;

    // Create target y
    VariableArray<bool> y = Variable.Observed(ytrain).Named("y");
    Variable<Vector> w = Variable.Random(new VectorGaussian(Vector.Zero(K),
        PositiveDefiniteMatrix.Identity(K))).Named("w");
    var ProbTrue = Variable.Beta(1, 10).Named("TPR");
    var ProbFalse = Variable.Beta(10, 1).Named("FNR");
    var mean = Variable.GaussianFromMeanAndPrecision(0, .1);
    var noise = Variable.GammaFromShapeAndRate(2, noiseRate);
    //var noise = Variable.Random(Gamma.PointMass(.1));
    BayesPointMachine_NoiseVariancePlusLabel(xtrain, w, y, mean, noise, ProbTrue, ProbFalse);

    InferenceEngine engine = new InferenceEngine();
    VectorGaussian wPosterior = engine.Infer<VectorGaussian>(w);
    var noisePost = engine.Infer<Gamma>(noise);
    noiseEstimate = noisePost.GetMean();
    Beta probTruePost = engine.Infer<Beta>(ProbTrue);
    Beta probFalsePost = engine.Infer<Beta>(ProbFalse);
    //Console.WriteLine("Dist over w=\n" + wPosterior);
    //Console.WriteLine("Dist over noise=\n" + noisePost);
    var meanPost = engine.Infer<Gaussian>(mean);

    VariableArray<bool> yt = Variable.Array<bool>(new Range(ytest.Length)).Named("ytest");
    BayesPointMachine_NoiseVariancePlusLabel(xtest, Variable.Random(wPosterior).Named("w"), yt,
        Variable.Random(meanPost), Variable.Random(noisePost),
        Variable.Random(probTruePost), Variable.Random(probFalsePost));
    return engine.Infer<DistributionArray<Bernoulli>>(yt);
}
private Variable<Vector> InitialiseWeights(
    DistributionType distributionType,
    DistributionName distributionName,
    int dimension,
    string[] hyperParameters)
{
    switch (distributionName)
    {
        case DistributionName.GaussianDefault:
            return Variable.Random(new VectorGaussian(
                Vector.Zero(dimension),
                PositiveDefiniteMatrix.Identity(dimension))).Named(
                "w." + distributionType.ToString());

        case DistributionName.GaussianInit:
            return Variable<Vector>.Random(
                Variable.New<VectorGaussian>().Named(
                    "w." + distributionType.ToString()));

        default:
            TraceListeners.Log(TraceEventType.Error, 0,
                "Invalid distribution name: " + distributionName.ToString(), true, true);
            return null;
    }
}
private void InnerProductModel()
{
    Vector a = Factor.Random(new VectorGaussian(Vector.FromArray(0.1, 0.2), PositiveDefiniteMatrix.Identity(2)));
    Vector b = Factor.Random(new VectorGaussian(Vector.FromArray(0.3, 0.4), PositiveDefiniteMatrix.Identity(2)));
    double c = Vector.InnerProduct(a, b);
    InferNet.Infer(c, nameof(c));
}
/// <summary>
/// For the multinomial regression model: generate synthetic data,
/// infer the model parameters and calculate the RMSE between the true
/// and mean inferred coefficients.
/// </summary>
/// <param name="numSamples">Number of samples</param>
/// <param name="numFeatures">Number of input features</param>
/// <param name="numClasses">Number of classes</param>
/// <param name="countPerSample">Total count in multinomial distributions
/// per sample</param>
/// <returns>RMSE between the true and mean inferred coefficients</returns>
public double MultinomialRegressionSynthetic(
    int numSamples, int numFeatures, int numClasses, int countPerSample)
{
    // array of Vector's. Each example is a Vector.
    var features = new Vector[numSamples];
    var counts = new int[numSamples][];

    // array of Vector's of true coefficients of each class
    var coefficients = new Vector[numClasses];
    var mean = Vector.Zero(numClasses);
    Rand.Restart(1);
    for (int i = 0; i < numClasses - 1; i++)
    {
        mean[i] = Rand.Normal();

        // initialize with a zero vector object
        coefficients[i] = Vector.Zero(numFeatures);
        Rand.Normal(Vector.Zero(numFeatures), PositiveDefiniteMatrix.Identity(numFeatures), coefficients[i]);
    }

    mean[numClasses - 1] = 0;

    // last class's coefficient vector = zero vector
    coefficients[numClasses - 1] = Vector.Zero(numFeatures);
    for (int i = 0; i < numSamples; i++)
    {
        features[i] = Vector.Zero(numFeatures);

        // samples are from standard multivariate normal
        Rand.Normal(Vector.Zero(numFeatures), PositiveDefiniteMatrix.Identity(numFeatures), features[i]);

        // var temp = Vector.FromArray(coefficients.Select(o => o.Inner(features[i])).ToArray());
        var temp = Vector.FromArray(
            (from o in coefficients select o.Inner(features[i])).ToArray());
        var p = MMath.Softmax(temp + mean);
        counts[i] = Rand.Multinomial(countPerSample, p);
    }

    Rand.Restart(DateTime.Now.Millisecond);
    VectorGaussian[] bPost;
    Gaussian[] meanPost;
    MultinomialRegression(features, counts, out bPost, out meanPost);
    var bMeans = bPost.Select(o => o.GetMean()).ToArray();
    var bVars = bPost.Select(o => o.GetVariance()).ToArray();
    double error = 0;
    Console.WriteLine("Coefficients -------------- ");
    for (int i = 0; i < numClasses; i++)
    {
        error += (bMeans[i] - coefficients[i]).Sum(o => o * o);
        Console.WriteLine("True " + coefficients[i]);
        Console.WriteLine("Inferred " + bMeans[i]);
    }

    Console.WriteLine("Mean -------------- ");
    Console.WriteLine("True " + mean);
    Console.WriteLine("Inferred " + Vector.FromArray(meanPost.Select(o => o.GetMean()).ToArray()));
    error = Math.Sqrt(error / (numClasses * numFeatures));
    Console.WriteLine(numSamples + " " + error);
    return error;
}
/// <summary>
/// For the multinomial regression model: generate synthetic data,
/// infer the model parameters and calculate the RMSE between the true
/// and mean inferred coefficients.
/// </summary>
/// <param name="numSamples">Number of samples</param>
/// <param name="numFeatures">Number of input features</param>
/// <param name="numClasses">Number of classes</param>
/// <param name="countPerSample">Total count per sample</param>
/// <returns>RMSE between the true and mean inferred coefficients</returns>
public double MultinomialRegressionSynthetic(
    int numSamples, int numFeatures, int numClasses, int countPerSample, double noiseVar = 0.0)
{
    var features = new Vector[numSamples];
    var counts = new int[numSamples][];
    var coefficients = new Vector[numClasses];
    var bias = Vector.Zero(numClasses);
    Rand.Restart(1);
    for (int i = 0; i < numClasses - 1; i++)
    {
        bias[i] = Rand.Normal();
        coefficients[i] = Vector.Zero(numFeatures);
        Rand.Normal(Vector.Zero(numFeatures), PositiveDefiniteMatrix.Identity(numFeatures), coefficients[i]);
    }

    bias[numClasses - 1] = 0;
    coefficients[numClasses - 1] = Vector.Zero(numFeatures);
    var noiseDistribution = new VectorGaussian(Vector.Zero(numClasses),
        PositiveDefiniteMatrix.IdentityScaledBy(numClasses, noiseVar));
    for (int i = 0; i < numSamples; i++)
    {
        features[i] = Vector.Zero(numFeatures);
        Rand.Normal(Vector.Zero(numFeatures), PositiveDefiniteMatrix.Identity(numFeatures), features[i]);
        var temp = Vector.FromArray(coefficients.Select(o => o.Inner(features[i])).ToArray());
        if (noiseVar != 0.0)
        {
            temp += noiseDistribution.Sample();
        }

        var p = MMath.Softmax(temp + bias);
        counts[i] = Rand.Multinomial(countPerSample, p);
    }

    IList<VectorGaussian> weightsPost;
    IList<Gaussian> biasPost;
    bool trackLowerBound = true;
    double ev = MultinomialRegression(features, counts, out weightsPost, out biasPost, trackLowerBound);
    if (trackLowerBound)
    {
        Console.WriteLine("Log lower bound= " + ev);
    }

    double error = 0;
    Console.WriteLine("Weights -------------- ");
    for (int i = 0; i < numClasses; i++)
    {
        var bMean = weightsPost[i].GetMean();
        error += (bMean - coefficients[i]).Sum(o => o * o);
        Console.WriteLine("Class " + i + " True " + coefficients[i]);
        Console.WriteLine("Class " + i + " Inferred " + bMean);
    }

    error = System.Math.Sqrt(error / (numClasses * numFeatures));
    Console.WriteLine("RMSE " + error);
    Console.WriteLine("Bias -------------- ");
    Console.WriteLine("True " + bias);
    Console.WriteLine("Inferred " + Vector.FromArray(biasPost.Select(o => o.GetMean()).ToArray()));
    return error;
}
private void VectorGaussianModel()
{
    Vector mm = Vector.FromArray(0.1, 0.2);
    Vector m = Factor.Random(VectorGaussian.FromMeanAndPrecision(mm, PositiveDefiniteMatrix.Identity(2)));
    PositiveDefiniteMatrix p = Factor.Random(Wishart.FromShapeAndRate(2, 1.0, 1.0));
    Vector c = Factor.VectorGaussian(m, p);
    InferNet.Infer(c, nameof(c));
}
public VectorGaussian Train(double[] incomes, double[] ages, bool[] ydata)
{
    var y = Variable.Observed(ydata);
    var j = y.Range;
    var x = SetupFeatures(incomes, ages, j);
    var w = Variable.Random(new VectorGaussian(Vector.Zero(3), PositiveDefiniteMatrix.Identity(3)));
    y[j] = Variable.GaussianFromMeanAndVariance(Variable.InnerProduct(w, x[j]), noise) > 0;
    return (VectorGaussian)InferenceEngine.Infer(w);
}
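// Train relies on a SetupFeatures helper that is not shown in this section. A
// plausible sketch, assuming each (income, age) pair is packed into a feature
// vector together with a constant 1 for the bias term, matching the
// 3-dimensional weight prior above:
private static VariableArray<Vector> SetupFeatures(double[] incomes, double[] ages, Range j)
{
    Vector[] xData = new Vector[incomes.Length];
    for (int i = 0; i < incomes.Length; i++)
    {
        xData[i] = Vector.FromArray(incomes[i], ages[i], 1);
    }

    return Variable.Observed(xData, j);
}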
public double MulticlassRegressionSynthetic(int numSamples, object softmaxOperator,
    out int iterations, out double lowerBound, double noiseVar = 0.0)
{
    int numFeatures = 6;
    int numClasses = 4;
    var features = new Vector[numSamples];
    var counts = new int[numSamples];
    var coefficients = new Vector[numClasses];
    var mean = Vector.Zero(numClasses);
    for (int i = 0; i < numClasses - 1; i++)
    {
        mean[i] = Rand.Normal();
        coefficients[i] = Vector.Zero(numFeatures);
        Rand.Normal(Vector.Zero(numFeatures), PositiveDefiniteMatrix.Identity(numFeatures), coefficients[i]);
    }

    mean[numClasses - 1] = 0;
    coefficients[numClasses - 1] = Vector.Zero(numFeatures);
    var noiseDistribution = new VectorGaussian(Vector.Zero(numClasses),
        PositiveDefiniteMatrix.IdentityScaledBy(numClasses, noiseVar));
    for (int i = 0; i < numSamples; i++)
    {
        features[i] = Vector.Zero(numFeatures);
        Rand.Normal(Vector.Zero(numFeatures), PositiveDefiniteMatrix.Identity(numFeatures), features[i]);
        var temp = Vector.FromArray(coefficients.Select(o => o.Inner(features[i])).ToArray());
        if (noiseVar != 0.0)
        {
            temp += noiseDistribution.Sample();
        }

        var p = MMath.Softmax(temp + mean);
        counts[i] = Rand.Sample(p);
    }

    Rand.Restart(DateTime.Now.Millisecond);
    VectorGaussian[] bPost;
    Gaussian[] meanPost;
    iterations = MulticlassRegression(features, counts, numClasses, out bPost, out meanPost, out lowerBound, softmaxOperator, true);
    var bMeans = bPost.Select(o => o.GetMean()).ToArray();
    var bVars = bPost.Select(o => o.GetVariance()).ToArray();
    double error = 0;
    Console.WriteLine("Coefficients -------------- ");
    for (int i = 0; i < numClasses; i++)
    {
        error += (bMeans[i] - coefficients[i]).Sum(o => o * o);
        Console.WriteLine("True " + coefficients[i]);
        Console.WriteLine("Inferred " + bMeans[i]);
    }

    Console.WriteLine("Mean -------------- ");
    Console.WriteLine("True " + mean);
    Console.WriteLine("Inferred " + Vector.FromArray(meanPost.Select(o => o.GetMean()).ToArray()));
    error = System.Math.Sqrt(error / (numClasses * numFeatures));
    Console.WriteLine(numSamples + " " + error);
    return error;
}
internal void ProbabilisticIndexMap()
{
    // TODO: change the path for cross-platform use
    double[,] dataIn = MatlabReader.ReadMatrix(new double[10, 6400 * 3], @"c:\temp\pim\chand.txt", ' ');
    Vector[,] pixData = new Vector[10, 6400];
    for (int i = 0; i < pixData.GetLength(0); i++)
    {
        int ct = 0;
        for (int j = 0; j < pixData.GetLength(1); j++)
        {
            pixData[i, j] = Vector.FromArray(dataIn[i, ct++], dataIn[i, ct++], dataIn[i, ct++]);
        }
    }

    Range images = new Range(pixData.GetLength(0));
    Range pixels = new Range(pixData.GetLength(1));
    VariableArray2D<Vector> pixelData = Variable.Constant(pixData, images, pixels);

    // For each image we have a palette of L multivariate Gaussians
    Range L = new Range(2);
    VariableArray2D<Vector> means = Variable.Array<Vector>(images, L).Named("means");
    means[images, L] = Variable.VectorGaussianFromMeanAndPrecision(
        Vector.FromArray(0.5, 0.5, 0.5), PositiveDefiniteMatrix.Identity(3)).ForEach(images, L);
    VariableArray2D<PositiveDefiniteMatrix> precs = Variable.Array<PositiveDefiniteMatrix>(images, L).Named("precs");
    precs[images, L] = Variable.WishartFromShapeAndScale(1.0, PositiveDefiniteMatrix.Identity(3)).ForEach(images, L);

    // Across all pixels we have a Dirichlet-distributed probability vector over
    // the palette entries (one per pixel, shared across images)
    VariableArray<Vector> pi = Variable.Array<Vector>(pixels);
    pi[pixels] = Variable.Dirichlet(L, new double[] { 1.1, 1.0 }).ForEach(pixels);

    // For each pixel of each image we have a discrete indicator
    VariableArray2D<int> ind = Variable.Array<int>(images, pixels).Named("ind");
    ind[images, pixels] = Variable.Discrete(pi[pixels]).ForEach(images);
    using (Variable.ForEach(pixels))
    {
        using (Variable.ForEach(images))
        {
            using (Variable.Switch(ind[images, pixels]))
            {
                pixelData[images, pixels] = Variable.VectorGaussianFromMeanAndPrecision(
                    means[images, ind[images, pixels]], precs[images, ind[images, pixels]]);
            }
        }
    }

    InferenceEngine ie = new InferenceEngine(new VariationalMessagePassing());
    ie.ShowProgress = true;
    ie.NumberOfIterations = 5;
    Console.WriteLine("Dist over L: " + ie.Infer(pi));
}
private void MatrixVectorProductModel()
{
    Matrix a = new Matrix(new double[,] { { 1, 2, 3 }, { 4, 5, 6 } });
    Vector m = Vector.FromArray(0.1, 1.2, 2.3);
    Vector b = Factor.Random(VectorGaussian.FromMeanAndPrecision(m, PositiveDefiniteMatrix.Identity(3)));
    Vector c = Factor.Product(a, b);
    InferNet.Infer(c, nameof(c));
}
public BPM_FixedNoise(int d, int n)
{
    Range j = new Range(n).Named("j");
    y = Variable.Array<bool>(j).Named("y");
    x = Variable.Array<Vector>(j).Named("x");
    noise = Variable.New<double>().Named("noise");
    w = Variable.Random(new VectorGaussian(Vector.Zero(d), PositiveDefiniteMatrix.Identity(d))).Named("w");
    engine = new InferenceEngine();
    y[j] = Variable.GaussianFromMeanAndVariance(Variable.InnerProduct(w, x[j]).Named("innerProduct"), noise) > 0;
}
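// NoiseTest (earlier in this section) calls fixedNoise.Train(x, y, noiseTrain),
// which is not shown here. A minimal sketch consistent with the fields wired up
// in this constructor (an assumption, not the actual implementation):
public Vector Train(Vector[] xData, bool[] yData, double noiseValue)
{
    x.ObservedValue = xData;
    y.ObservedValue = yData;
    noise.ObservedValue = noiseValue;
    VectorGaussian wPosterior = engine.Infer<VectorGaussian>(w);
    return wPosterior.GetMean(); // NoiseTest only needs a point estimate of w
}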
public static void TestLogisticRegression()
{
    const int seed = 39;
    Rand.Restart(seed);

    const int d = 10;
    const int n = 100;
    const int epIter = 10;

    Vector w = Vector.Zero(d);
    Rand.Normal(Vector.Zero(d), PositiveDefiniteMatrix.Identity(d), w);
    double b = Rand.Normal(0, 1);
    // double b = 0;

    Vector[] X;
    bool[] Y;
    GenData(n, w, b, out X, out Y);

    Console.Write("Y: ");
    StringUtils.PrintArray(Y);

    VectorGaussian wPost;
    Gaussian biasPost;
    Type logisticOp = typeof(KEPLogisticOp);
    // Type logisticOp = typeof(LogisticOp2);
    string factorOpPath = Config.PathToFactorOperator(
        // "serialFactorOp_fm_kgg_joint_irf500_orf1000_n400_iter5_sf1_st20_ntr5000.mat"
        "serialFactorOp_fm_kgg_joint_irf500_orf1000_proj_n400_iter5_sf1_st20_ntr5000.mat"
    );
    KEPLogisticOpInstance opIns = KEPLogisticOpInstance.LoadLogisticOpInstance(factorOpPath);
    opIns.SetPrintTrueMessages(true);
    OpControl.Add(typeof(KEPLogisticOp), opIns);
    InferCoefficients(X, Y, out wPost, out biasPost, epIter, logisticOp);

    // print
    Console.WriteLine("n: {0}", n);
    Console.WriteLine("d: {0}", d);
    int t = Y.Sum(o => o ? 1 : 0);
    Console.WriteLine("number of true: {0}", t);
    Console.WriteLine("True bias: {0}", b);
    Console.WriteLine("Inferred bias: {0}", biasPost);
    Console.WriteLine("True w: {0}", w);
    Console.WriteLine("Inferred w: ");
    Console.WriteLine(wPost);
}
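// GenData is not shown in this section. A minimal sketch under the assumption
// that inputs are standard normal and labels follow a logistic-regression
// likelihood with weights w and bias b (hypothetical; the real generator may
// differ):
private static void GenData(int n, Vector w, double b, out Vector[] X, out bool[] Y)
{
    int d = w.Count;
    X = new Vector[n];
    Y = new bool[n];
    var N0I = new VectorGaussian(Vector.Zero(d), PositiveDefiniteMatrix.Identity(d));
    for (int i = 0; i < n; i++)
    {
        X[i] = N0I.Sample();
        double activation = w.Inner(X[i]) + b;
        // Bernoulli label with success probability sigmoid(activation)
        Y[i] = Rand.Double() < MMath.Logistic(activation);
    }
}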
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="SumVectorGaussianOp"]/message_doc[@name="SumAverageConditional(IList{VectorGaussian}, VectorGaussian)"]/*'/>
public static VectorGaussian SumAverageConditional([SkipIfAnyUniform] IList<VectorGaussian> array, VectorGaussian result)
{
    if (array == null)
    {
        throw new ArgumentNullException(nameof(array));
    }

    if (result == null)
    {
        throw new ArgumentNullException(nameof(result));
    }

    if (array.Count < 1)
    {
        result.Point = Vector.Zero(result.Dimension);
        return result;
    }

    if (array.Any(element => element == null))
    {
        throw new ArgumentNullException(nameof(array));
    }

    int dimension = result.Dimension;
    if (array.Any(element => element.Dimension != dimension))
    {
        throw new ArgumentException("The result and all elements of the array must have the same number of dimensions.");
    }

    var sumMean = Vector.Zero(dimension);
    var sumVariance = PositiveDefiniteMatrix.IdentityScaledBy(dimension, 0);
    var elementMean = Vector.Zero(dimension);
    var elementVariance = PositiveDefiniteMatrix.Identity(dimension);
    foreach (var element in array)
    {
        if (!element.IsProper())
        {
            return element;
        }

        element.GetMeanAndVariance(elementMean, elementVariance);
        sumMean.SetToSum(sumMean, elementMean);
        sumVariance.SetToSum(sumVariance, elementVariance);
    }

    result.SetMeanAndVariance(sumMean, sumVariance);
    return result;
}
internal static IDistribution<Vector[]> RandomGaussianVectorArray(int N, int C)
{
    VectorGaussian[] array = new VectorGaussian[N];
    for (int i = 0; i < N; i++)
    {
        Vector mean = Vector.Zero(C);
        for (int j = 0; j < C; j++)
        {
            mean[j] = Rand.Normal();
        }

        array[i] = new VectorGaussian(mean, PositiveDefiniteMatrix.Identity(C));
    }

    return Distribution<Vector>.Array(array);
}
public void ObservedVariableInMarginalPrototype()
{
    var nDimensions = Variable.New<int>().Named("nDimensions");
    var d = new Range(nDimensions).Named("d");
    var mean = Variable.Observed(Vector.Zero(1)).Named("mean");
    mean.SetValueRange(d);
    var precision = Variable.Observed(PositiveDefiniteMatrix.Identity(1)).Named("precision");
    var x = Variable.VectorGaussianFromMeanAndPrecision(mean, precision).Named("x");
    mean.ObservedValue = Vector.Zero(1);
    nDimensions.ObservedValue = 1;

    InferenceEngine engine = new InferenceEngine();
    Console.WriteLine(engine.Infer<VectorGaussian>(x));
}
/// <summary>
/// Build and run a multinomial regression model.
/// </summary>
/// <param name="xObs">An array of vectors of observed inputs.
/// The length of the array is the number of samples, and the
/// length of the vectors is the number of input features.</param>
/// <param name="yObs">An array of arrays of counts, where the first index is the sample,
/// and the second index is the class.</param>
/// <param name="bPost">The returned posterior over the coefficients.</param>
/// <param name="meanPost">The returned posterior over the means.</param>
public void MultinomialRegression(Vector[] xObs, int[][] yObs, out VectorGaussian[] bPost, out Gaussian[] meanPost)
{
    int C = yObs[0].Length;
    int N = xObs.Length;
    int K = xObs[0].Count;
    var c = new Range(C).Named("c");
    var n = new Range(N).Named("n");

    // model
    var B = Variable.Array<Vector>(c).Named("coefficients");
    B[c] = Variable.VectorGaussianFromMeanAndPrecision(
        Vector.Zero(K), PositiveDefiniteMatrix.Identity(K)).ForEach(c);
    var m = Variable.Array<double>(c).Named("mean");
    m[c] = Variable.GaussianFromMeanAndPrecision(0, 1).ForEach(c);

    // Pin the last class's coefficients and mean to zero so the softmax model is identifiable
    Variable.ConstrainEqualRandom(B[C - 1], VectorGaussian.PointMass(Vector.Zero(K)));
    Variable.ConstrainEqualRandom(m[C - 1], Gaussian.PointMass(0));

    var x = Variable.Array<Vector>(n);
    x.ObservedValue = xObs;
    var yData = Variable.Array(Variable.Array<int>(c), n);
    yData.ObservedValue = yObs;
    var trialsCount = Variable.Array<int>(n);
    trialsCount.ObservedValue = yObs.Select(o => o.Sum()).ToArray();
    var g = Variable.Array(Variable.Array<double>(c), n);
    g[n][c] = Variable.InnerProduct(B[c], x[n]) + m[c];
    var p = Variable.Array<Vector>(n);
    p[n] = Variable.Softmax(g[n]);
    using (Variable.ForEach(n))
    {
        yData[n] = Variable.Multinomial(trialsCount[n], p[n]);
    }

    // inference
    // var ie = new InferenceEngine(new VariationalMessagePassing());
    var ie = new InferenceEngine(new ExpectationPropagation());
    // ie.Compiler.GivePriorityTo(typeof(SoftmaxOp_KM11_Sparse));
    bPost = ie.Infer<VectorGaussian[]>(B);
    meanPost = ie.Infer<Gaussian[]>(m);
}
/// <summary>
/// Load data from CSV.
/// </summary>
/// <param name="csvFilePath">CSV file path.</param>
/// <param name="hasHeader">CSV has headers.</param>
public virtual void LoadDataFromCsv(string csvFilePath, bool hasHeader = false)
{
    if (initialized == false)
    {
        throw new Exception("ML Model has not been initialized yet.");
    }

    lock (mlModelCompilation)
    {
        hasHeaders = hasHeader;
        IEnumerable<string> csvLines = File.ReadAllLines(csvFilePath);
        if (hasHeader)
        {
            csvLines = csvLines.Skip(1);
        }

        foreach (string line in csvLines)
        {
            IList<double> features = new List<double>();
            string[] vals = line.Replace(" ", "").Split(",");
            for (int i = 0; i < vals.Length - 1; i++)
            {
                features.Add(Convert.ToDouble(vals[i]));
            }

            Validity.Add(Convert.ToBoolean(vals[vals.Length - 1]));
            Data.Add(features.ToArray());
        }

        // Create random variable w with a VectorGaussian prior of dimension FeatureCount.
        // Features => (Gold | Nickel | Copper) Concentration; IsGoodMaterial? is the label.
        w = Variable.Random(new VectorGaussian(Vector.Zero(FeatureCount),
            PositiveDefiniteMatrix.Identity(FeatureCount))).Named("w");

        // Create observed variable from the Validity labels
        y = Variable.Observed(Validity.ToArray());
        BayesPointMachine.Run(this, y, w);
    }
}
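// Usage sketch for LoadDataFromCsv (hypothetical file name and contents): each
// row holds the feature values followed by a boolean label in the last column,
// for example
//   0.35,0.10,0.55,true
//   0.05,0.80,0.15,false
// and a caller would load it with:
//   model.LoadDataFromCsv("materials.csv", hasHeader: false);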
public void BugsJaws()
{
    Assert.True(false, "This Bugs example is incomplete -- TODO");
    Vector[] jawData =
    {
        Vector.FromArray(47.8, 48.8, 49.0, 49.7), Vector.FromArray(46.4, 47.3, 47.7, 48.4),
        Vector.FromArray(46.3, 46.8, 47.8, 48.5), Vector.FromArray(45.1, 45.3, 46.1, 47.2),
        Vector.FromArray(47.6, 48.5, 48.9, 49.3), Vector.FromArray(52.5, 53.2, 53.3, 53.7),
        Vector.FromArray(51.2, 53.0, 54.3, 54.5), Vector.FromArray(49.8, 50.0, 50.3, 52.7),
        Vector.FromArray(48.1, 50.8, 52.3, 54.4), Vector.FromArray(45.0, 47.0, 47.3, 48.3),
        Vector.FromArray(51.2, 51.4, 51.6, 51.9), Vector.FromArray(48.5, 49.2, 53.0, 55.5),
        Vector.FromArray(52.1, 52.8, 53.7, 55.0), Vector.FromArray(48.2, 48.9, 49.3, 49.8),
        Vector.FromArray(49.6, 50.4, 51.2, 51.8), Vector.FromArray(50.7, 51.7, 52.7, 53.3),
        Vector.FromArray(47.2, 47.7, 48.4, 49.5), Vector.FromArray(53.3, 54.6, 55.1, 55.3),
        Vector.FromArray(46.2, 47.5, 48.1, 48.4), Vector.FromArray(46.3, 47.6, 51.3, 51.8)
    };
    Vector ageData = Vector.FromArray(8.0, 8.5, 9.0, 9.5);

    // Rate and therefore scale are both the identity matrix
    PositiveDefiniteMatrix Scale = PositiveDefiniteMatrix.Identity(4);

    // Shape parameter k in Bugs is set to 4 - this corresponds to
    // a value of k/2 = 2 for the Infer.NET shape parameter
    double shape = 2.0;
    Range d = new Range(jawData.Length);
    VariableArray<Vector> data = Variable.Observed<Vector>(jawData, d).Named("data");
    Variable<Vector> age = Variable.Observed<Vector>(ageData);
    Variable<double> beta0 = Variable.GaussianFromMeanAndPrecision(0.0, 0.001).Named("beta0");
    Variable<double> beta1 = Variable.GaussianFromMeanAndPrecision(0.0, 0.001).Named("beta1");
    Variable<PositiveDefiniteMatrix> omega = Variable.WishartFromShapeAndScale(shape, Scale);
    //Variable<Vector> mean = age; // beta0 + beta1 * age;
    //data[d] = Variable.VectorGaussianFromMeanAndPrecision(mean, omega).ForEach(d);
}
public DistributionArray<Bernoulli> TestBPM_FixedNoise(double noise, out double noiseEstimate)
{
    noiseEstimate = double.NaN;
    int K = xtrain[0].Count;

    // Create target y
    VariableArray<bool> y = Variable.Observed(ytrain).Named("y");
    Variable<Vector> w = Variable.Random(new VectorGaussian(Vector.Zero(K),
        PositiveDefiniteMatrix.Identity(K))).Named("w");
    var mean = Variable.GaussianFromMeanAndPrecision(0, .1);
    BayesPointMachine_FixedNoise(xtrain, w, y, mean, noise);

    InferenceEngine engine = new InferenceEngine();
    VectorGaussian wPosterior = engine.Infer<VectorGaussian>(w);
    //Console.WriteLine("Dist over w=\n" + wPosterior);
    var meanPost = engine.Infer<Gaussian>(mean);

    VariableArray<bool> yt = Variable.Array<bool>(new Range(ytest.Length)).Named("ytest");
    BayesPointMachine_FixedNoise(xtest, Variable.Random(wPosterior).Named("w"), yt,
        Variable.Random(meanPost), noise);
    return engine.Infer<DistributionArray<Bernoulli>>(yt);
}
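// BayesPointMachine_FixedNoise is called above but not defined in this section.
// A plausible sketch, assuming a linear score w.x plus a shared mean pushed
// through fixed-variance Gaussian noise, consistent with the other BPM variants
// here (an assumption, not the actual implementation):
private void BayesPointMachine_FixedNoise(Vector[] xData, Variable<Vector> w,
    VariableArray<bool> y, Variable<double> mean, double noise)
{
    Range j = y.Range;
    VariableArray<Vector> x = Variable.Observed(xData, j);
    y[j] = Variable.GaussianFromMeanAndVariance(
        Variable.InnerProduct(w, x[j]) + mean, noise) > 0;
}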
private void GetItemVectorGaussianModel()
{
    Vector[] array = new Vector[3];
    Vector mean = Vector.FromArray(0.1, 0.2);
    PositiveDefiniteMatrix prec = PositiveDefiniteMatrix.Identity(2);
    for (int i = 0; i < 3; i++)
    {
        array[i] = Factor.Random(new VectorGaussian(mean, prec));
    }

    Vector c = Factor.GetItem(array, 1);
    Vector[] d = new Vector[2];
    if (true) // preserve the previous assignment
    {
        d = Factor.GetItems(array, new int[] { 0, 2 });
    }

    InferNet.Infer(c, nameof(c));
    InferNet.Infer(d, nameof(d));
}
public int SyntheticData(int D, int N, int Ntest, double noiseVariance)
{
    xtrain = new Vector[N];
    xtest = new Vector[Ntest];
    ytrain = new bool[N];
    ytest = new bool[Ntest];
    var N0I = new VectorGaussian(Vector.Zero(D), PositiveDefiniteMatrix.Identity(D));
    var trueW = N0I.Sample();

    // Gaussian.Sample takes (mean, precision), so passing 1/noiseVariance
    // draws label noise with variance noiseVariance.
    for (int i = 0; i < N; i++)
    {
        xtrain[i] = N0I.Sample();
        ytrain[i] = (xtrain[i].Inner(trueW) + Gaussian.Sample(0, 1.0 / noiseVariance)) > 0.0;
    }

    for (int i = 0; i < Ntest; i++)
    {
        xtest[i] = N0I.Sample();
        ytest[i] = (xtest[i].Inner(trueW) + Gaussian.Sample(0, 1.0 / noiseVariance)) > 0.0;
    }

    return 0;
}
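// The TestBPM_* methods in this section read the xtrain/ytrain/xtest/ytest
// fields that SyntheticData populates, so a typical driver looks like the
// following sketch (illustrative parameter values):
public void RunFixedNoiseExperiment()
{
    SyntheticData(D: 2, N: 1000, Ntest: 200, noiseVariance: 0.1);
    double noiseEstimate;
    var predictions = TestBPM_FixedNoise(0.1, out noiseEstimate);
    Console.WriteLine(predictions);
}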
public BayesPointMachine(int nFeatures, double noise)
{
    // Training model
    nTrain = Variable.New<int>().Named("nTrain");
    Range trainItem = new Range(nTrain).Named("trainItem");
    trainingLabels = Variable.Array<bool>(trainItem).Named("trainingLabels");
    trainingItems = Variable.Array<Vector>(trainItem).Named("trainingItems");
    weights = Variable.Random(new VectorGaussian(Vector.Zero(nFeatures),
        PositiveDefiniteMatrix.Identity(nFeatures))).Named("weights");
    trainingLabels[trainItem] = Variable.IsPositive(Variable.GaussianFromMeanAndVariance(
        Variable.InnerProduct(weights, trainingItems[trainItem]), noise));

    // Testing model
    nTest = Variable.New<int>().Named("nTest");
    Range testItem = new Range(nTest).Named("testItem");
    testItems = Variable.Array<Vector>(testItem).Named("testItems");
    testLabels = Variable.Array<bool>(testItem).Named("testLabels");
    if (singleModel)
    {
        testLabels[testItem] = Variable.IsPositive(Variable.GaussianFromMeanAndVariance(
            Variable.InnerProduct(weights, testItems[testItem]), noise));

        testEngine = new InferenceEngine(new ExpectationPropagation());
        testEngine.NumberOfIterations = 2;
    }
    else
    {
        weightPosterior = Variable.New<VectorGaussian>().Named("weightPosterior");
        Variable<Vector> testWeights = Variable<Vector>.Random(weightPosterior);
        testLabels[testItem] = Variable.IsPositive(Variable.GaussianFromMeanAndVariance(
            Variable.InnerProduct(testWeights, testItems[testItem]), noise));

        trainEngine = new InferenceEngine(new ExpectationPropagation());
        trainEngine.ShowProgress = false;
        trainEngine.NumberOfIterations = 5;
        testEngine = new InferenceEngine(new ExpectationPropagation());
        testEngine.ShowProgress = false;
        testEngine.NumberOfIterations = 1;
    }
}
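// A plausible train/test pair for the two-engine configuration above (sketch
// only; the class's actual methods are not shown in this section). With
// singleModel == false, training infers the weight posterior once and the test
// model consumes it through the weightPosterior variable:
public VectorGaussian TrainModel(Vector[] items, bool[] labels)
{
    nTrain.ObservedValue = items.Length;
    trainingItems.ObservedValue = items;
    trainingLabels.ObservedValue = labels;
    return trainEngine.Infer<VectorGaussian>(weights);
}

public Bernoulli[] TestModel(Vector[] items, VectorGaussian weightDist)
{
    nTest.ObservedValue = items.Length;
    testItems.ObservedValue = items;
    weightPosterior.ObservedValue = weightDist;
    return testEngine.Infer<Bernoulli[]>(testLabels);
}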
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="SumVectorGaussianOp"]/message_doc[@name="ArrayAverageLogarithm1{TVectorGaussianList}(VectorGaussian, IList{VectorGaussian}, TVectorGaussianList)"]/*'/>
/// <typeparam name="TVectorGaussianList">A list of <see cref="VectorGaussian"/> distributions.</typeparam>
public static TVectorGaussianList ArrayAverageLogarithm1<TVectorGaussianList>(
    [SkipIfUniform] VectorGaussian sum,
    [Stochastic, Proper] IList<VectorGaussian> array,
    TVectorGaussianList to_array)
    where TVectorGaussianList : IList<VectorGaussian>
{
    // Check inputs for consistency
    int dimension = CheckArgumentConsistency(sum, sum, array, to_array);
    TVectorGaussianList result = to_array;
    var sumMean = Vector.Zero(dimension);
    var sumVariance = PositiveDefiniteMatrix.Identity(dimension);
    sum.GetMeanAndVariance(sumMean, sumVariance);

    // This version does one update of q(array[i]) for each array element in turn.
    Vector arraySumOfMean = Vector.Zero(dimension);
    foreach (VectorGaussian element in array)
    {
        arraySumOfMean.SetToSum(arraySumOfMean, element.GetMean());
    }

    for (int i = 0; i < result.Count; i++)
    {
        arraySumOfMean.SetToDifference(arraySumOfMean, array[i].GetMean());
        VectorGaussian oldResult = result[i];
        result[i] = new VectorGaussian(sumMean - arraySumOfMean, sumVariance);
        oldResult.SetToRatio(result[i], oldResult);
        oldResult.SetToProduct(array[i], oldResult);
        arraySumOfMean.SetToSum(arraySumOfMean, oldResult.GetMean());
    }

    return result;
}