public void WriteRecords(string filePath, Dictionary<string, object> extra = null)
{
    MatlabWriter writer = new MatlabWriter(filePath);
    WriteGammaList(writer, InPrecision, "inShape", "inRate");
    WriteGammaList(writer, OutPrecision, "outShape", "outRate");
    // MatlabWriter cannot write a boolean, so convert it to double first
    writer.Write("consultOracle", MatrixUtils.ToDouble(ConsultOracle));
    Matrix uncertaintyMat = MatrixUtils.StackColumnsIgnoreNull(Uncertainty);
    // write a Matrix
    writer.Write("uncertainty", uncertaintyMat);
    WriteGammaList(writer, OracleOut, "oraOutShape", "oraOutRate");
    if (extra != null)
    {
        foreach (var kv in extra)
        {
            writer.Write(kv.Key, kv.Value);
        }
    }
    writer.Dispose();
}
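// A minimal usage sketch for WriteRecords ("recorder" is a hypothetical instance of the
// enclosing class with its message lists already populated). Each entry of "extra" becomes
// one more variable in the .mat file:
var extra = new Dictionary<string, object>
{
    { "seed", 1.0 },        // scalar values round-trip as in the tests further below
    { "elapsedMs", 123.0 }
};
recorder.WriteRecords("records.mat", extra);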
public static void WriteGammaList(MatlabWriter writer, List<Gamma?> gList, string shapeName, string rateName)
{
    double[] shapes, rates;
    GammaListToArrays(gList, out shapes, out rates);
    writer.Write(shapeName, shapes);
    writer.Write(rateName, rates);
}
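// WriteGammaList relies on a GammaListToArrays helper that is not shown here. A plausible
// sketch, assuming null entries are encoded as NaN (the real helper may use a different
// convention):
public static void GammaListToArrays(List<Gamma?> gList, out double[] shapes, out double[] rates)
{
    shapes = new double[gList.Count];
    rates = new double[gList.Count];
    for (int i = 0; i < gList.Count; i++)
    {
        if (gList[i].HasValue)
        {
            shapes[i] = gList[i].Value.Shape;
            rates[i] = gList[i].Value.Rate;
        }
        else
        {
            // assumption: a null Gamma is recorded as NaN in both arrays
            shapes[i] = double.NaN;
            rates[i] = double.NaN;
        }
    }
}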
internal void StudentIsPositiveTest4()
{
    double shape = 1;
    Gamma precPrior = Gamma.FromShapeAndRate(shape, shape);
    // mean=-1 causes improper messages
    double mean = -1;
    Gaussian meanPrior = Gaussian.PointMass(mean);
    double evExpected;
    Gaussian xExpected = StudentIsPositiveExact(mean, precPrior, out evExpected);
    GaussianOp.ForceProper = false;
    GaussianOp_Laplace.modified = true;
    GaussianOp_Laplace.modified2 = true;
    Gaussian xF = Gaussian.Uniform();
    Gaussian xB = Gaussian.Uniform();
    Gamma q = GaussianOp_Laplace.QInit();
    double r0 = 0.38;
    r0 = 0.1;
    for (int iter = 0; iter < 20; iter++)
    {
        q = GaussianOp_Laplace.Q(xB, meanPrior, precPrior, q);
        //xF = GaussianOp_Laplace.SampleAverageConditional(xB, meanPrior, precPrior, q);
        xF = Gaussian.FromMeanAndPrecision(mean, r0);
        xB = IsPositiveOp.XAverageConditional(true, xF);
        Console.WriteLine("xF = {0} xB = {1}", xF, xB);
    }
    Console.WriteLine("x = {0} should be {1}", xF * xB, xExpected);
    double[] precs = EpTests.linspace(1e-3, 5, 100);
    double[] evTrue = new double[precs.Length];
    double[] evApprox = new double[precs.Length];
    double[] evApprox2 = new double[precs.Length];
    //r0 = q.GetMean();
    double sum = 0, sum2 = 0;
    for (int i = 0; i < precs.Length; i++)
    {
        double r = precs[i];
        Gaussian xFt = Gaussian.FromMeanAndPrecision(mean, r);
        evTrue[i] = IsPositiveOp.LogAverageFactor(true, xFt) + precPrior.GetLogProb(r);
        evApprox[i] = IsPositiveOp.LogAverageFactor(true, xF) + precPrior.GetLogProb(r) + xB.GetLogAverageOf(xFt) - xB.GetLogAverageOf(xF);
        evApprox2[i] = IsPositiveOp.LogAverageFactor(true, xF) + precPrior.GetLogProb(r0) + q.GetLogProb(r) - q.GetLogProb(r0);
        sum += System.Math.Exp(evApprox[i]);
        sum2 += System.Math.Exp(evApprox2[i]);
    }
    Console.WriteLine("r0 = {0}: {1} {2} {3}", r0, sum, sum2, q.GetVariance() + System.Math.Pow(r0 - q.GetMean(), 2));
    //TODO: change path for cross platform using
    using (var writer = new MatlabWriter(@"..\..\..\Tests\student.mat"))
    {
        writer.Write("z", evTrue);
        writer.Write("z2", evApprox);
        writer.Write("z3", evApprox2);
        writer.Write("precs", precs);
    }
}
public void WriteBadMatrixThrowsArgumentException()
{
    var matrix = Matrix<float>.Build.Dense(1, 1);
    Assert.Throws<ArgumentException>(() => MatlabWriter.Write("somefile1", matrix, string.Empty));
    Assert.Throws<ArgumentException>(() => MatlabWriter.Write("somefile1", matrix, null));
}
static void Main(string[] args)
{
    var data = Matrix<double>.Build.Random(2, 2);
    DelimitedWriter.Write("123", data);
    MatlabWriter.Write("123", Matrix<double>.Build.Random(2, 2), "a");
}
public static void TestWritingMat()
{
    string p = Config.PathToSavedFile("test_save_mat.mat");
    MatlabWriter w = new MatlabWriter(p);
    w.Write("b", 2);
    double[] arr = new double[] { 1, 2, 3 };
    w.Write("arr", arr);
    Vector vec = Vector.FromArray(new double[] { 4, 5, 6, 7 });
    w.Write("vec", vec);
    List<double> list = new List<double>(arr);
    w.Write("list", list);
    Matrix m = Matrix.Parse("1 2\n 3 4");
    w.Write("m", m);
    long time = 1329L;
    w.Write("longNum", time);
    List<Matrix> mats = new List<Matrix>();
    mats.Add(Matrix.IdentityScaledBy(2, 3.0));
    mats.Add(Matrix.IdentityScaledBy(3, 4.0));
    w.Write("list_mats", mats);
    w.Dispose();
}
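// A minimal read-back sketch for the file written above. MatlabReader.Read (used in the
// tests further below) returns a name -> object dictionary; the cast assumes arrays
// round-trip with the same representation, as in MatlabWriteNumericNameTest below:
public static void TestReadingMat()
{
    string p = Config.PathToSavedFile("test_save_mat.mat");
    Dictionary<string, object> vars = MatlabReader.Read(p);
    double[] arr = (double[])vars["arr"];
    Console.WriteLine("read {0} variables; arr has {1} elements", vars.Count, arr.Length);
}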
public void CanWriteComplexMatrices()
{
    var mat1 = Matrix<Complex>.Build.Dense(5, 3);
    for (var i = 0; i < mat1.ColumnCount; i++)
    {
        mat1[i, i] = new Complex(i + .1, i + .1);
    }

    var mat2 = Matrix<Complex>.Build.Dense(4, 5);
    for (var i = 0; i < mat2.RowCount; i++)
    {
        mat2[i, i] = new Complex(i + .1, i + .1);
    }

    var mat3 = Matrix<Complex>.Build.Sparse(5, 4);
    mat3[0, 0] = new Complex(1.1, 1.1);
    mat3[0, 2] = new Complex(2.2, 2.2);
    mat3[4, 3] = new Complex(3.3, 3.3);

    var mat4 = Matrix<Complex>.Build.Sparse(3, 5);
    mat4[0, 0] = new Complex(1.1, 1.1);
    mat4[0, 2] = new Complex(2.2, 2.2);
    mat4[2, 4] = new Complex(3.3, 3.3);

    Matrix<Complex>[] write = { mat1, mat2, mat3, mat4 };
    string[] names = { "mat1", "dense_matrix_2", "s1", "sparse2" };
    if (File.Exists("testz.mat"))
    {
        File.Delete("testz.mat");
    }

    MatlabWriter.Write("testz.mat", write, names);

    var read = MatlabReader.ReadAll<Complex>("testz.mat", names);
    Assert.AreEqual(write.Length, read.Count);
    for (var i = 0; i < write.Length; i++)
    {
        var w = write[i];
        var r = read[names[i]];
        Assert.AreEqual(w.RowCount, r.RowCount);
        Assert.AreEqual(w.ColumnCount, r.ColumnCount);
        Assert.IsTrue(w.Equals(r));
    }

    File.Delete("testz.mat");
}
public void CanWriteDoubleMatrices()
{
    Matrix<double> mat1 = Matrix<double>.Build.Dense(5, 5);
    for (var i = 0; i < mat1.ColumnCount; i++)
    {
        mat1[i, i] = i + .1;
    }

    Matrix<double> mat2 = Matrix<double>.Build.Dense(4, 5);
    for (var i = 0; i < mat2.RowCount; i++)
    {
        mat2[i, i] = i + .1;
    }

    Matrix<double> mat3 = Matrix<double>.Build.Sparse(5, 4);
    mat3[0, 0] = 1.1;
    mat3[0, 2] = 2.2;
    mat3[4, 3] = 3.3;

    Matrix<double> mat4 = Matrix<double>.Build.Sparse(3, 5);
    mat4[0, 0] = 1.1;
    mat4[0, 2] = 2.2;
    mat4[2, 4] = 3.3;

    Matrix<double>[] write = { mat1, mat2, mat3, mat4 };
    string[] names = { "mat1", "dense_matrix_2", "s1", "sparse2" };
    if (File.Exists("testd.mat"))
    {
        File.Delete("testd.mat");
    }

    MatlabWriter.Write("testd.mat", write, names);

    var read = MatlabReader.ReadAll<double>("testd.mat", names);
    Assert.AreEqual(write.Length, read.Count);
    for (var i = 0; i < write.Length; i++)
    {
        var w = write[i];
        var r = read[names[i]];
        Assert.AreEqual(w.RowCount, r.RowCount);
        Assert.AreEqual(w.ColumnCount, r.ColumnCount);
        Assert.IsTrue(w.Equals(r));
    }

    File.Delete("testd.mat");
}
public void WriteBadMatricesThrowsArgumentException()
{
    Matrix<float> matrix = Matrix<float>.Build.Dense(1, 1);
    Assert.Throws<ArgumentException>(() => MatlabWriter.Write("somefile3", matrix, string.Empty));
    Assert.Throws<ArgumentException>(() => MatlabWriter.Write("somefile3", matrix, null));
    Assert.Throws<ArgumentException>(() => MatlabWriter.Write("somefile3", matrix, "some matrix"));
    Assert.Throws<ArgumentException>(() => MatlabWriter.Write("somefile3", new[] { matrix }, new[] { string.Empty }));
    Assert.Throws<ArgumentException>(() => MatlabWriter.Write("somefile3", new[] { matrix }, new string[] { null }));
    Assert.Throws<ArgumentException>(() => MatlabWriter.Write("somefile3", new[] { matrix, matrix }, new[] { "matrix" }));
    Assert.Throws<ArgumentException>(() => MatlabWriter.Write("somefile3", new[] { matrix }, new[] { "some matrix" }));
}
public void MatlabWriteNumericNameTest()
{
    string fileName = $"{System.IO.Path.GetTempPath()}MatlabWriteNumericNameTest{Environment.CurrentManagedThreadId}.mat";
    using (MatlabWriter writer = new MatlabWriter(fileName))
    {
        writer.Write("24", new int[0]);
    }
    Dictionary<string, object> vars = MatlabReader.Read(fileName);
    int[] ints = (int[])vars["24"];
    Assert.Empty(ints);
}
//[DeploymentItem(@"Data\test.mat", "Data")] public void MatlabWriterTest() { Dictionary <string, object> dict = MatlabReader.Read(Path.Combine(TestUtils.DataFolderPath, "test.mat")); string fileName = $"{System.IO.Path.GetTempPath()}MatlabWriterTest{Environment.CurrentManagedThreadId}.mat"; using (MatlabWriter writer = new MatlabWriter(fileName)) { foreach (var entry in dict) { writer.Write(entry.Key, entry.Value); } } MatlabReaderTester(fileName); }
public static void ToLogisticSerializeToMat(string file, List<Tuple<Beta, Gaussian, Beta>> msgs, Dictionary<string, object> extra)
{
    int n = msgs.Count;
    double[] outBetaA = new double[n];
    double[] outBetaB = new double[n];
    double[] inNormalMeans = new double[n];
    double[] inNormalVariances = new double[n];
    // alpha parameters
    double[] inBetaA = new double[n];
    double[] inBetaB = new double[n];
    for (int i = 0; i < msgs.Count; i++)
    {
        Tuple<Beta, Gaussian, Beta> pairI = msgs[i];
        Beta toBeta = pairI.Item1;
        Gaussian fromX = pairI.Item2;
        Beta fromLogistic = pairI.Item3;
        fromX.GetMeanAndVariance(out inNormalMeans[i], out inNormalVariances[i]);
        outBetaA[i] = toBeta.TrueCount;
        outBetaB[i] = toBeta.FalseCount;
        double alpha = fromLogistic.TrueCount;
        double beta = fromLogistic.FalseCount;
        inBetaA[i] = alpha;
        inBetaB[i] = beta;
    }
    // write to .mat file
    MatlabWriter matWriter = new MatlabWriter(file);
    matWriter.Write("outBetaA", outBetaA);
    matWriter.Write("outBetaB", outBetaB);
    matWriter.Write("inNormalMeans", inNormalMeans);
    matWriter.Write("inNormalVariances", inNormalVariances);
    matWriter.Write("inBetaA", inBetaA);
    matWriter.Write("inBetaB", inBetaB);
    if (extra != null)
    {
        foreach (var kv in extra)
        {
            matWriter.Write(kv.Key, kv.Value);
        }
    }
    matWriter.Dispose();
}
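// A minimal usage sketch for ToLogisticSerializeToMat. The message values here are
// hypothetical placeholders, not the output of a real inference run:
var msgs = new List<Tuple<Beta, Gaussian, Beta>>
{
    Tuple.Create(new Beta(2, 3), Gaussian.FromMeanAndVariance(0, 1), new Beta(1, 1)),
    Tuple.Create(new Beta(5, 1), Gaussian.FromMeanAndVariance(-1, 2), new Beta(4, 2))
};
ToLogisticSerializeToMat("logistic_msgs.mat", msgs, null);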
//[DeploymentItem(@"Data\test.mat", "Data")] public void MatlabWriterTest() { Dictionary <string, object> dict = MatlabReader.Read(Path.Combine( #if NETCORE Path.GetDirectoryName(typeof(PsychTests).Assembly.Location), // work dir is not the one with Microsoft.ML.Probabilistic.Tests.dll on netcore and neither is .Location on netfull #endif "Data", "test.mat")); string fileName = $"{System.IO.Path.GetTempPath()}MatlabWriterTest{Environment.CurrentManagedThreadId}.mat"; using (MatlabWriter writer = new MatlabWriter(fileName)) { foreach (var entry in dict) { writer.Write(entry.Key, entry.Value); } } MatlabReaderTester(fileName); }
// Save the collected data to a .mat file
private void button1_Click(object sender, EventArgs e)
{
    double[,] matdata = new double[2, dataList[0].Count];
    for (var i = 0; i < 2; i++)
    {
        for (var j = 0; j < dataList.ElementAt(i).Count; j++)
        {
            matdata[i, j] = dataList.ElementAt(i).ElementAt(j);
        }
    }
    Matrix<double> matrix = Matrix<double>.Build.DenseOfArray(matdata);
    String path = "D:\\TestDataFile\\thmat\\" + DateTime.Now.ToString("MM-dd-H-mm-ss_") + ".mat";
    MatlabWriter.Write(path, matrix, "data");
}
/// <summary>
/// Writes the classifier output as a matrix
/// </summary>
/// <param name="list">list of OutputAndTarget objects, i.e. outputs of the CNTK classifier</param>
/// <param name="path">destination path of the .mat file to create</param>
public static void saveDataMatlabMatrix(List<NetEvaluation.OutputAndTarget> list, string path)
{
    // rows equal to the number of records
    Matrix<float> target = Matrix<float>.Build.Dense(list.Count, 1);
    // columns equal to the number of values per record
    Matrix<float> salida = Matrix<float>.Build.Dense(list.Count, list[0].Output.Length);
    for (int i = 0; i < list.Count; i++)
    {
        target.At(i, 0, list[i].Target);
        salida.SetRow(i, list[i].Output);
    }
    var dict = new Dictionary<string, Matrix<float>>();
    dict.Add("target", target);
    dict.Add("output", salida);
    MatlabWriter.Write(path, dict);
}
public void Save(string file)
{
    if (file.EndsWith(".mat"))
    {
        MatlabWriter.Write<Complex>(file, matrix, "A");
    }
    else if (file.EndsWith(".mtx"))
    {
        MatrixMarketWriter.WriteMatrix<Complex>(file, matrix);
    }
    else if (file.EndsWith(".csv"))
    {
        DelimitedWriter.Write<Complex>(file, matrix, ";");
    }
    else
    {
        throw new NotImplementedException();
    }
}
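// Usage sketch: the extension of the output path selects the serializer ("solver" is a
// hypothetical instance of the class that owns Save and its "matrix" field):
solver.Save("system.mat");   // MATLAB binary, written under the variable name "A"
solver.Save("system.mtx");   // Matrix Market text format
solver.Save("system.csv");   // semicolon-delimited text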
public void MatlabWriteStringListTest()
{
    List<string> strings = new List<string>();
    strings.Add("a");
    strings.Add("b");
    string fileName = $"{System.IO.Path.GetTempPath()}MatlabWriteStringListTest{Environment.CurrentManagedThreadId}.mat";
    using (MatlabWriter writer = new MatlabWriter(fileName))
    {
        writer.Write("strings", strings);
    }
    Dictionary<string, object> vars = MatlabReader.Read(fileName);
    string[] array = (string[])vars["strings"];
    for (int i = 0; i < array.Length; i++)
    {
        Assert.Equal(strings[i], array[i]);
    }
}
public void MatlabWriteStringDictionaryTest()
{
    Dictionary<string, string> dictString = new Dictionary<string, string>();
    dictString["a"] = "a";
    dictString["b"] = "b";
    string fileName = $"{System.IO.Path.GetTempPath()}MatlabWriteStringDictionaryTest{Environment.CurrentManagedThreadId}.mat";
    using (MatlabWriter writer = new MatlabWriter(fileName))
    {
        writer.Write("dictString", dictString);
    }
    Dictionary<string, object> vars = MatlabReader.Read(fileName);
    Dictionary<string, object> dict = (Dictionary<string, object>)vars["dictString"];
    foreach (var entry in dictString)
    {
        Assert.Equal(dictString[entry.Key], dict[entry.Key]);
    }
}
private void TestCollocationPointCreation()
{
    var model = new CollocationModel();
    ModelCreator modelCreator = new ModelCreator(model);
    string filename = "..\\..\\..\\InputFiles\\PlateWithHole.txt";
    IsogeometricReader modelReader = new IsogeometricReader(modelCreator, filename);
    modelReader.CreateCollocationModelFromFile();

    //var solverBuilder = new SuiteSparseSolver.Builder();
    //solverBuilder.DofOrderer = new DofOrderer(
    //    new NodeMajorDofOrderingStrategy(), new NullReordering());
    var solverBuilder = new GmresSolver.Builder();
    ISolver solver = new GmresSolver(model,
        new AsymmetricDofOrderer(new RowDofOrderingStrategy()),
        new DofOrderer(new NodeMajorDofOrderingStrategy(), new NullReordering()));

    // Structural problem provider
    var provider = new ProblemStructural(model, solver);

    // Linear static analysis
    var childAnalyzer = new LinearAnalyzer(model, solver, provider);
    var parentAnalyzer = new StaticAnalyzer(model, solver, provider, childAnalyzer);

    // Run the analysis
    parentAnalyzer.Initialize();
    parentAnalyzer.BuildMatrices();

    var k = solver.LinearSystems[0].Matrix;
    Matrix<double> kmatlab = MathNet.Numerics.LinearAlgebra.CreateMatrix.Dense<double>(k.NumRows, k.NumColumns);
    for (int i = 0; i < k.NumRows; i++)
    {
        for (int j = 0; j < k.NumColumns; j++)
        {
            kmatlab[i, j] = k[i, j];
        }
    }
    MatlabWriter.Write("..\\..\\..\\InputFiles\\KcolMsolve.mat", kmatlab, "Ktotal");
}
public void WriteBadMatricesThrowsArgumentException()
{
    var matrix = Matrix<float>.Build.Dense(1, 1);
    var filePath = Path.GetTempFileName();
    try
    {
        Assert.Throws<ArgumentException>(() => MatlabWriter.Write(filePath, matrix, string.Empty));
        Assert.Throws<ArgumentException>(() => MatlabWriter.Write(filePath, matrix, null));
        Assert.Throws<ArgumentException>(() => MatlabWriter.Write(filePath, matrix, "some matrix"));
        Assert.Throws<ArgumentException>(() => MatlabWriter.Write(filePath, new[] { matrix }, new[] { string.Empty }));
        Assert.Throws<ArgumentException>(() => MatlabWriter.Write(filePath, new[] { matrix }, new string[] { null }));
        Assert.Throws<ArgumentException>(() => MatlabWriter.Write(filePath, new[] { matrix, matrix }, new[] { "matrix" }));
        Assert.Throws<ArgumentException>(() => MatlabWriter.Write(filePath, new[] { matrix }, new[] { "some matrix" }));
    }
    finally
    {
        File.Delete(filePath);
    }
}
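// For contrast with the failure cases above, a minimal sketch of a write that should
// succeed: as the tests show, the variable name must be non-empty and contain no spaces.
var good = Matrix<double>.Build.Dense(2, 2, 1.0);
var goodPath = Path.Combine(Path.GetTempPath(), "good.mat");
MatlabWriter.Write(goodPath, good, "matrix1");
File.Delete(goodPath);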
public void WriteNullMatrixThrowsArgumentNullException()
{
    Assert.Throws<ArgumentNullException>(() => MatlabWriter.Write<double>("somefile2", null, "matrix"));
}
public static void LogisticIrt(int numParams, PriorType priorType, AlgorithmType algType, string conditionPrefix = "")
{
    // timing on Intel Core 2 Duo P9500 with 4GB RAM running Windows Vista
    // 10_250 trial 1:
    // Bayesian/Hierarchical 2000 = 5.4s inference only
    // Variational/Hierarchical 50 iter = 4.2s inference only
    // Variational/Hierarchical 10 iter = 0.85s inference only
    // Variational_JJ/Hierarchical 50 iter = 0.1s inference only
    // Variational_JJ/Hierarchical 10 iter = 0.04s inference only
    // time on desktop:
    // Variational/Hierarchical 10 iter = 0.75s inference only (including test)
    // Variational_JJ/Hierarchical 10 iter = 0.07s inference only (including test)
    LogisticIrtModel train = new LogisticIrtModel(numParams, priorType);
    //train.engine.NumberOfIterations = 100;
    //train.engine.ShowTimings = true;
    string logistic_type = "";
    //logistic_type = "JJ";
    if (logistic_type == "JJ")
    {
        train.engine.Compiler.GivePriorityTo(typeof(LogisticOp_JJ96));
    }
    bool specialInitialization = false;
    if (specialInitialization)
    {
        // change initialization
        train.abilityMean.InitialiseTo(new Gaussian(5, 10));
        train.abilityPrecision.InitialiseTo(new Gamma(1, 10));
        train.difficultyMean.InitialiseTo(new Gaussian(5, 10));
        train.difficultyPrecision.InitialiseTo(new Gamma(1, 10));
    }
    LogisticIrtTestModel test = new LogisticIrtTestModel(numParams);
    train.engine.ShowProgress = false;
    test.engine.ShowProgress = false;
    if (algType == AlgorithmType.Variational)
    {
        train.engine.Algorithm = new VariationalMessagePassing();
        test.engine.Algorithm = new VariationalMessagePassing();
    }
    bool showTiming = false;
    string baseFolder = @"..\..\";
    string modelName = numParams + "-PL";
    //modelName = "Mild_skew";
    //modelName = "Extreme_skew";
    //modelName = "Lsat";
    //modelName = "Wide_b";
    string modelFolder = baseFolder + @"Data_mat\" + modelName;
    DirectoryInfo modelDir = new DirectoryInfo(modelFolder);
    foreach (DirectoryInfo conditionDir in modelDir.GetDirectories())
    {
        string condition = conditionDir.Name;
        if (!condition.StartsWith(conditionPrefix))
        {
            continue;
        }
        int trimStart = condition.Length - 1;
        string inputFolder = baseFolder + @"Data_mat\" + modelName + @"\" + condition;
        string alg;
        if (algType == AlgorithmType.Variational)
        {
            alg = "Variational" + logistic_type + @"\" + priorType;
        }
        else
        {
            alg = algType + @"\" + priorType;
        }
        string outputFolder = baseFolder + @"Estimates_mat\" + modelName + @"\" + alg + @"\" + condition;
        Console.WriteLine(outputFolder);
        DirectoryInfo outputDir = Directory.CreateDirectory(outputFolder);
        DirectoryInfo inputDir = new DirectoryInfo(inputFolder);
        foreach (FileInfo file in inputDir.GetFiles("*.mat"))
        {
            string name = file.Name;
            string number = name; //.Substring(trimStart);
            string outputFileName = outputFolder + @"\" + number;
            if (File.Exists(outputFileName))
            {
                continue;
            }
            Console.WriteLine(file.FullName);
            Dictionary<string, object> dict = MatlabReader.Read(file.FullName);
            Matrix m = (Matrix)dict["Y"];
            Gaussian[] abilityPost, difficultyPost;
            Gamma[] discriminationPost = null;
            Beta[] guessProbPost = null;
            Matrix responseProbMean;
            if (algType != AlgorithmType.MCMC)
            {
                // VMP
                Stopwatch watch = new Stopwatch();
                watch.Start();
                train.ObserveResponses(m);
                train.RunToConvergence();
                abilityPost = train.engine.Infer<Gaussian[]>(train.ability);
                difficultyPost = train.engine.Infer<Gaussian[]>(train.difficulty);
                if (numParams >= 2)
                {
                    discriminationPost = train.engine.Infer<Gamma[]>(train.discrimination);
                }
                if (numParams >= 3)
                {
                    guessProbPost = train.engine.Infer<Beta[]>(train.guessProb);
                }
                responseProbMean = test.GetResponseProbs(abilityPost, difficultyPost, discriminationPost, guessProbPost);
                watch.Stop();
                if (showTiming)
                {
                    Console.WriteLine(algType + " elapsed time = {0}ms", watch.ElapsedMilliseconds);
                }
            }
            else
            {
                // sampler
                LogisticIrtSampler sampler = new LogisticIrtSampler();
                sampler.abilityMeanPrior = Gaussian.FromMeanAndVariance(0, 1e6);
                sampler.abilityPrecPrior = Gamma.FromShapeAndRate(1, 1);
                sampler.difficultyMeanPrior = Gaussian.FromMeanAndVariance(0, 1e6);
                sampler.difficultyPrecPrior = Gamma.FromShapeAndRate(1, 1);
                sampler.discriminationMeanPrior = Gaussian.FromMeanAndVariance(0, 1e6);
                sampler.discriminationPrecPrior = Gamma.FromShapeAndRate(1, 1);
                // for debugging
                //sampler.abilityObserved = ((Matrix)dict["ability"]).ToArray<double>();
                //sampler.difficultyObserved = ((Matrix)dict["difficulty"]).ToArray<double>();
                //sampler.discriminationObserved = ((Matrix)dict["discrimination"]).ToArray<double>();
                if (train.abilityMean.IsObserved)
                {
                    sampler.abilityMeanPrior = Gaussian.PointMass(train.abilityMean.ObservedValue);
                }
                if (train.abilityPrecision.IsObserved)
                {
                    sampler.abilityPrecPrior = Gamma.PointMass(train.abilityPrecision.ObservedValue);
                }
                if (train.difficultyMean.IsObserved)
                {
                    sampler.difficultyMeanPrior = Gaussian.PointMass(train.difficultyMean.ObservedValue);
                }
                if (train.difficultyPrecision.IsObserved)
                {
                    sampler.difficultyPrecPrior = Gamma.PointMass(train.difficultyPrecision.ObservedValue);
                }
                if (train.discriminationMean.IsObserved)
                {
                    sampler.discriminationMeanPrior = Gaussian.PointMass(train.discriminationMean.ObservedValue);
                }
                if (train.discriminationPrecision.IsObserved)
                {
                    sampler.discriminationPrecPrior = Gamma.PointMass(train.discriminationPrecision.ObservedValue);
                }
                Stopwatch watch = new Stopwatch();
                watch.Start();
                sampler.Sample(new Options(), m);
                abilityPost = sampler.abilityPost;
                difficultyPost = sampler.difficultyPost;
                responseProbMean = sampler.responseProbMean;
                discriminationPost = sampler.discriminationPost;
                watch.Stop();
                if (showTiming)
                {
                    Console.WriteLine("MCMC elapsed time = {0}ms", watch.ElapsedMilliseconds);
                }
            }
            bool showEstimates = false;
            if (showEstimates)
            {
                Console.WriteLine("abilityMean = {0}", train.engine.Infer(train.abilityMean));
                Console.WriteLine("abilityPrecision = {0}", train.engine.Infer(train.abilityPrecision));
                //Console.WriteLine("abilityMean2 = {0}", train.engine.Infer(train.abilityMean2));
                //Console.WriteLine("abilityPrecision2 = {0}", train.engine.Infer(train.abilityPrecision2));
                Console.WriteLine("difficultyMean = {0}", train.engine.Infer(train.difficultyMean));
                Console.WriteLine("difficultyPrecision = {0}", train.engine.Infer(train.difficultyPrecision));
            }
            if (showEstimates)
            {
                for (int i = 0; i < 10; i++)
                {
                    Console.WriteLine(responseProbMean[i]);
                }
                //Console.WriteLine(ToMeanMatrix(difficultyPost));
            }
            using (MatlabWriter writer = new MatlabWriter(outputFileName))
            {
                writer.Write("ability", ToMeanMatrix(abilityPost));
                writer.Write("ability_se", ToStddevMatrix(abilityPost));
                writer.Write("difficulty", ToMeanMatrix(difficultyPost));
                writer.Write("difficulty_se", ToStddevMatrix(difficultyPost));
                if (discriminationPost != null)
                {
                    writer.Write("discrimination", ToMeanMatrix(discriminationPost));
                    writer.Write("discrimination_se", ToStddevMatrix(discriminationPost));
                }
                if (guessProbPost != null)
                {
                    writer.Write("guessing", ToMeanAndStddevMatrix(guessProbPost));
                }
                writer.Write("p", responseProbMean);
            }
            //break;
        }
        //break;
    }
}
internal void HeteroscedasticGPR()
{
    // This model is based on the paper "Most Likely Heteroscedastic Gaussian Process Regression" by Kersting et al, ICML 2007
    // Silverman's motorcycle benchmark dataset
    double[] inputs = new double[]
    {
        2.4, 2.6, 3.2, 3.6, 4, 6.2, 6.6, 6.8, 7.8, 8.199999999999999, 8.800000000000001, 8.800000000000001, 9.6, 10, 10.2, 10.6,
        11, 11.4, 13.2, 13.6, 13.8, 14.6, 14.6, 14.6, 14.6, 14.6, 14.6, 14.8, 15.4, 15.4, 15.4, 15.4,
        15.6, 15.6, 15.8, 15.8, 16, 16, 16.2, 16.2, 16.2, 16.4, 16.4, 16.6, 16.8, 16.8, 16.8, 17.6,
        17.6, 17.6, 17.6, 17.8, 17.8, 18.6, 18.6, 19.2, 19.4, 19.4, 19.6, 20.2, 20.4, 21.2, 21.4, 21.8,
        22, 23.2, 23.4, 24, 24.2, 24.2, 24.6, 25, 25, 25.4, 25.4, 25.6, 26, 26.2, 26.2, 26.4,
        27, 27.2, 27.2, 27.2, 27.6, 28.2, 28.4, 28.4, 28.6, 29.4, 30.2, 31, 31.2, 32, 32, 32.8,
        33.4, 33.8, 34.4, 34.8, 35.2, 35.2, 35.4, 35.6, 35.6, 36.2, 36.2, 38, 38, 39.2, 39.4, 40,
        40.4, 41.6, 41.6, 42.4, 42.8, 42.8, 43, 44, 44.4, 45, 46.6, 47.8, 47.8, 48.8, 50.6, 52,
        53.2, 55, 55, 55.4, 57.6
    };
    double[] outputs = new double[]
    {
        0, -1.3, -2.7, 0, -2.7, -2.7, -2.7, -1.3, -2.7, -2.7, -1.3, -2.7, -2.7, -2.7, -5.4, -2.7,
        -5.4, 0, -2.7, -2.7, 0, -13.3, -5.4, -5.4, -9.300000000000001, -16, -22.8, -2.7, -22.8, -32.1, -53.5, -54.9,
        -40.2, -21.5, -21.5, -50.8, -42.9, -26.8, -21.5, -50.8, -61.7, -5.4, -80.40000000000001, -59, -71, -91.09999999999999, -77.7, -37.5,
        -85.59999999999999, -123.1, -101.9, -99.09999999999999, -104.4, -112.5, -50.8, -123.1, -85.59999999999999, -72.3, -127.2, -123.1, -117.9, -134, -101.9, -108.4,
        -123.1, -123.1, -128.5, -112.5, -95.09999999999999, -81.8, -53.5, -64.40000000000001, -57.6, -72.3, -44.3, -26.8, -5.4, -107.1, -21.5, -65.59999999999999,
        -16, -45.6, -24.2, 9.5, 4, 12, -21.5, 37.5, 46.9, -17.4, 36.2, 75, 8.1, 54.9, 48.2, 46.9,
        16, 45.6, 1.3, 75, -16, -54.9, 69.59999999999999, 34.8, 32.1, -37.5, 22.8, 46.9, 10.7, 5.4, -1.3, -21.5,
        -13.3, 30.8, -10.7, 29.4, 0, -10.7, 14.7, -1.3, 0, 10.7, 10.7, -26.8, -14.7, -13.3, 0, 10.7,
        -14.7, -2.7, 10.7, -2.7, 10.7
    };
    Range j = new Range(inputs.Length);
    Vector[] inputsVec = Util.ArrayInit(inputs.Length, i => Vector.FromArray(inputs[i]));
    VariableArray<Vector> x = Variable.Observed(inputsVec, j).Named("x");
    VariableArray<double> y = Variable.Observed(outputs, j).Named("y");
    // Set up the GP prior, which will be filled in later
    Variable<SparseGP> prior = Variable.New<SparseGP>().Named("prior");
    Variable<SparseGP> prior2 = Variable.New<SparseGP>().Named("prior2");
    // The sparse GP variable - a distribution over functions
    Variable<IFunction> f = Variable<IFunction>.Random(prior).Named("f");
    Variable<IFunction> r = Variable<IFunction>.Random(prior2).Named("r");
    Variable<double> mean = Variable.FunctionEvaluate(f, x[j]).Named("mean");
    Variable<double> logVariance = Variable.FunctionEvaluate(r, x[j]).Named("logVariance");
    Variable<double> variance = Variable.Exp(logVariance);
    y[j] = Variable.GaussianFromMeanAndVariance(mean, variance);
    InferenceEngine engine = new InferenceEngine();
    GaussianProcess gp = new GaussianProcess(new ConstantFunction(0), new SquaredExponential(0));
    GaussianProcess gp2 = new GaussianProcess(new ConstantFunction(0), new SquaredExponential(0));
    // Fill in the sparse GP prior
    //Vector[] basis = Util.ArrayInit(120, i => Vector.FromArray(0.5*i));
    Vector[] basis = Util.ArrayInit(60, i => Vector.FromArray(1.0 * i));
    prior.ObservedValue = new SparseGP(new SparseGPFixed(gp, basis));
    prior2.ObservedValue = new SparseGP(new SparseGPFixed(gp2, basis));
    // Infer the posterior Sparse GP
    SparseGP sgp = engine.Infer<SparseGP>(f);
    // Check that training set is classified correctly
    Console.WriteLine();
    Console.WriteLine("Predictions on training set:");
    for (int i = 0; i < outputs.Length; i++)
    {
        Gaussian post = sgp.Marginal(inputsVec[i]);
        //double postMean = post.GetMean();
        Console.WriteLine("f({0}) = {1}", inputs[i], post);
    }
    //TODO: change path for cross platform using
    using (MatlabWriter writer = new MatlabWriter(@"..\..\HGPR.mat"))
    {
        int n = outputs.Length;
        double[] m = new double[n];
        double[] s = new double[n];
        for (int i = 0; i < n; i++)
        {
            Gaussian post = sgp.Marginal(inputsVec[i]);
            double mi, vi;
            post.GetMeanAndVariance(out mi, out vi);
            m[i] = mi;
            s[i] = System.Math.Sqrt(vi);
        }
        writer.Write("mean", m);
        writer.Write("std", s);
    }
}
public void Sample(Options options, Matrix data)
{
    if (options.numParams > 2)
    {
        throw new Exception("numParams > 2");
    }
    int numStudents = data.Rows;
    int numQuestions = data.Cols;
    // initialize the sampler at the mean of the priors (not sampling from the priors)
    double abilityMean = abilityMeanPrior.GetMean();
    double abilityPrec = abilityPrecPrior.GetMean();
    double difficultyMean = difficultyMeanPrior.GetMean();
    double difficultyPrec = difficultyPrecPrior.GetMean();
    double discriminationMean = discriminationMeanPrior.GetMean();
    double discriminationPrec = discriminationPrecPrior.GetMean();
    double[] ability = new double[numStudents];
    double[] difficulty = new double[numQuestions];
    List<double>[] difficultySamples = new List<double>[numQuestions];
    GaussianEstimator[] difficultyEstimator = new GaussianEstimator[numQuestions];
    for (int question = 0; question < numQuestions; question++)
    {
        difficultyEstimator[question] = new GaussianEstimator();
        difficultySamples[question] = new List<double>();
        if (difficultyObserved != null)
        {
            difficulty[question] = difficultyObserved[question];
            difficultyEstimator[question].Add(difficultyObserved[question]);
            difficultySamples[question].Add(difficultyObserved[question]);
        }
    }
    List<double>[] abilitySamples = new List<double>[numStudents];
    GaussianEstimator[] abilityEstimator = new GaussianEstimator[ability.Length];
    for (int student = 0; student < abilityEstimator.Length; student++)
    {
        abilityEstimator[student] = new GaussianEstimator();
        abilitySamples[student] = new List<double>();
        if (abilityObserved != null)
        {
            ability[student] = abilityObserved[student];
            abilityEstimator[student].Add(abilityObserved[student]);
            abilitySamples[student].Add(abilityObserved[student]);
        }
    }
    double[] discrimination = new double[numQuestions];
    List<double>[] discriminationSamples = new List<double>[numQuestions];
    GammaEstimator[] discriminationEstimator = new GammaEstimator[numQuestions];
    for (int question = 0; question < numQuestions; question++)
    {
        discriminationEstimator[question] = new GammaEstimator();
        discriminationSamples[question] = new List<double>();
        discrimination[question] = 1;
        if (discriminationObserved != null)
        {
            discrimination[question] = discriminationObserved[question];
            discriminationEstimator[question].Add(discriminationObserved[question]);
            discriminationSamples[question].Add(discriminationObserved[question]);
        }
    }
    responseProbMean = new Matrix(numStudents, numQuestions);
    int niters = options.numberOfSamples;
    int burnin = options.burnIn;
    double logisticVariance = Math.PI * Math.PI / 3;
    double shape = 4.5;
    Gamma precPrior = Gamma.FromShapeAndRate(shape, (shape - 1) * logisticVariance);
    precPrior = Gamma.PointMass(1);
    double[,] prec = new double[numStudents, numQuestions];
    double[,] x = new double[numStudents, numQuestions];
    int numRejected = 0, numAttempts = 0;
    for (int iter = 0; iter < niters; iter++)
    {
        for (int student = 0; student < numStudents; student++)
        {
            for (int question = 0; question < numQuestions; question++)
            {
                // sample prec given ability, difficulty, x
                // N(x; ability-difficulty, 1/prec) = Gamma(prec; 1.5, (x-ability+difficulty)^2/2)
                Gamma precPost = precPrior;
                double xMean = (ability[student] - difficulty[question]) * discrimination[question];
                double delta = x[student, question] - xMean;
                Gamma like = Gamma.FromShapeAndRate(1.5, 0.5 * delta * delta);
                precPost.SetToProduct(precPost, like);
                prec[student, question] = precPost.Sample();
                // sample x given ability, difficulty, prec, data
                // using an independence chain MH
                bool y = (data[student, question] > 0);
                double sign = y ? 1.0 : -1.0;
                Gaussian xPrior = Gaussian.FromMeanAndPrecision(xMean, prec[student, question]);
                // we want to sample from xPrior*I(x>0)
                // instead we sample from xPost
                Gaussian xPost = xPrior * IsPositiveOp.XAverageConditional(y, xPrior);
                double oldx = x[student, question];
                double newx = xPost.Sample();
                numAttempts++;
                if (newx * sign < 0)
                {
                    newx = oldx; // rejected
                    numRejected++;
                }
                else
                {
                    // importance weights
                    double oldw = xPrior.GetLogProb(oldx) - xPost.GetLogProb(oldx);
                    double neww = xPrior.GetLogProb(newx) - xPost.GetLogProb(newx);
                    // acceptance ratio
                    double paccept = Math.Exp(neww - oldw);
                    if (paccept < 1 && Rand.Double() > paccept)
                    {
                        newx = oldx; // rejected
                        numRejected++;
                    }
                }
                x[student, question] = newx;
                if (iter >= burnin)
                {
                    double responseProb = MMath.Logistic(xMean);
                    responseProbMean[student, question] += responseProb;
                }
            }
        }
        if (abilityObserved == null)
        {
            // sample ability given difficulty, prec, x
            for (int student = 0; student < numStudents; student++)
            {
                Gaussian post = Gaussian.FromMeanAndPrecision(abilityMean, abilityPrec);
                for (int question = 0; question < numQuestions; question++)
                {
                    // N(x; disc*(ability-difficulty), 1/prec) =propto N(x/disc; ability-difficulty, 1/disc^2/prec) = N(ability; x/disc+difficulty, 1/disc^2/prec)
                    Gaussian abilityLike = Gaussian.FromMeanAndPrecision(
                        x[student, question] / discrimination[question] + difficulty[question],
                        prec[student, question] * discrimination[question] * discrimination[question]);
                    post.SetToProduct(post, abilityLike);
                }
                ability[student] = post.Sample();
                if (iter >= burnin)
                {
                    abilityEstimator[student].Add(post);
                    abilitySamples[student].Add(ability[student]);
                }
            }
        }
        // sample difficulty given ability, prec, x
        for (int question = 0; question < numQuestions; question++)
        {
            Gaussian post = Gaussian.FromMeanAndPrecision(difficultyMean, difficultyPrec);
            for (int student = 0; student < numStudents; student++)
            {
                // N(x; disc*(ability-difficulty), 1/prec) =propto N(x/disc; ability-difficulty, 1/disc^2/prec) = N(difficulty; ability-x/disc, 1/disc^2/prec)
                if (discrimination[question] > 0)
                {
                    Gaussian like = Gaussian.FromMeanAndPrecision(
                        ability[student] - x[student, question] / discrimination[question],
                        prec[student, question] * discrimination[question] * discrimination[question]);
                    post.SetToProduct(post, like);
                }
            }
            difficulty[question] = post.Sample();
            if (iter >= burnin)
            {
                //if (difficulty[question] > 100)
                //    Console.WriteLine("difficulty[{0}] = {1}", question, difficulty[question]);
                difficultyEstimator[question].Add(post);
                difficultySamples[question].Add(difficulty[question]);
            }
        }
        if (options.numParams > 1 && discriminationObserved == null)
        {
            // sample discrimination given ability, difficulty, prec, x
            for (int question = 0; question < numQuestions; question++)
            {
                // moment-matching on the prior
                Gaussian approxPrior = Gaussian.FromMeanAndVariance(
                    Math.Exp(discriminationMean + 0.5 / discriminationPrec),
                    Math.Exp(2 * discriminationMean + 1 / discriminationPrec) * (Math.Exp(1 / discriminationPrec) - 1));
                Gaussian post = approxPrior;
                for (int student = 0; student < numStudents; student++)
                {
                    // N(x; disc*delta, 1/prec) =propto N(x/delta; disc, 1/prec/delta^2)
                    double delta = ability[student] - difficulty[question];
                    if (delta > 0)
                    {
                        Gaussian like = Gaussian.FromMeanAndPrecision(x[student, question] / delta, prec[student, question] * delta * delta);
                        post.SetToProduct(post, like);
                    }
                }
                TruncatedGaussian postTrunc = new TruncatedGaussian(post, 0, double.PositiveInfinity);
                double olddisc = discrimination[question];
                double newdisc = postTrunc.Sample();
                // importance weights
                Func<double, double> priorLogProb = delegate (double d)
                {
                    double logd = Math.Log(d);
                    return Gaussian.GetLogProb(logd, discriminationMean, 1 / discriminationPrec) - logd;
                };
                double oldw = priorLogProb(olddisc) - approxPrior.GetLogProb(olddisc);
                double neww = priorLogProb(newdisc) - approxPrior.GetLogProb(newdisc);
                // acceptance ratio
                double paccept = Math.Exp(neww - oldw);
                if (paccept < 1 && Rand.Double() > paccept)
                {
                    // rejected
                }
                else
                {
                    discrimination[question] = newdisc;
                }
                if (iter >= burnin)
                {
                    discriminationEstimator[question].Add(discrimination[question]);
                    discriminationSamples[question].Add(discrimination[question]);
                }
            }
        }
        // sample abilityMean given ability, abilityPrec
        Gaussian abilityMeanPost = abilityMeanPrior;
        for (int student = 0; student < numStudents; student++)
        {
            Gaussian like = GaussianOp.MeanAverageConditional(ability[student], abilityPrec);
            abilityMeanPost *= like;
        }
        abilityMean = abilityMeanPost.Sample();
        // sample abilityPrec given ability, abilityMean
        Gamma abilityPrecPost = abilityPrecPrior;
        for (int student = 0; student < numStudents; student++)
        {
            Gamma like = GaussianOp.PrecisionAverageConditional(ability[student], abilityMean);
            abilityPrecPost *= like;
        }
        abilityPrec = abilityPrecPost.Sample();
        // sample difficultyMean given difficulty, difficultyPrec
        Gaussian difficultyMeanPost = difficultyMeanPrior;
        for (int question = 0; question < numQuestions; question++)
        {
            Gaussian like = GaussianOp.MeanAverageConditional(difficulty[question], difficultyPrec);
            difficultyMeanPost *= like;
        }
        difficultyMean = difficultyMeanPost.Sample();
        // sample difficultyPrec given difficulty, difficultyMean
        Gamma difficultyPrecPost = difficultyPrecPrior;
        for (int question = 0; question < numQuestions; question++)
        {
            Gamma like = GaussianOp.PrecisionAverageConditional(difficulty[question], difficultyMean);
            difficultyPrecPost *= like;
        }
        difficultyPrec = difficultyPrecPost.Sample();
        // sample discriminationMean given discrimination, discriminationPrec
        Gaussian discriminationMeanPost = discriminationMeanPrior;
        for (int question = 0; question < numQuestions; question++)
        {
            Gaussian like = GaussianOp.MeanAverageConditional(Math.Log(discrimination[question]), discriminationPrec);
            discriminationMeanPost *= like;
        }
        discriminationMean = discriminationMeanPost.Sample();
        // sample discriminationPrec given discrimination, discriminationMean
        Gamma discriminationPrecPost = discriminationPrecPrior;
        for (int question = 0; question < numQuestions; question++)
        {
            Gamma like = GaussianOp.PrecisionAverageConditional(Math.Log(discrimination[question]), discriminationMean);
            discriminationPrecPost *= like;
        }
        discriminationPrec = discriminationPrecPost.Sample();
        //if (iter % 1 == 0)
        //    Console.WriteLine("iter = {0}", iter);
    }
    //Console.WriteLine("abilityMean = {0}, abilityPrec = {1}", abilityMean, abilityPrec);
    //Console.WriteLine("difficultyMean = {0}, difficultyPrec = {1}", difficultyMean, difficultyPrec);
    int numSamplesUsed = niters - burnin;
    responseProbMean.Scale(1.0 / numSamplesUsed);
    //Console.WriteLine("acceptance rate = {0}", ((double)numAttempts - numRejected)/numAttempts);
    difficultyPost = Array.ConvertAll(difficultyEstimator, est => est.GetDistribution(Gaussian.Uniform()));
    abilityPost = Array.ConvertAll(abilityEstimator, est => est.GetDistribution(Gaussian.Uniform()));
    if (options.numParams > 1)
    {
        discriminationPost = Array.ConvertAll(discriminationEstimator, est => est.GetDistribution(new Gamma()));
    }
    abilityCred = GetCredibleIntervals(options.credibleIntervalProbability, abilitySamples);
    difficultyCred = GetCredibleIntervals(options.credibleIntervalProbability, difficultySamples);
    bool saveSamples = false;
    if (saveSamples)
    {
        using (MatlabWriter writer = new MatlabWriter(@"..\..\samples.mat"))
        {
            int q = 11;
            writer.Write("difficulty", difficultySamples[q]);
            writer.Write("discrimination", discriminationSamples[q]);
        }
    }
}
static void Main()
{
    Func<double, double> Result = ((double result) => { return Math.Pow(Math.E, -result); });
    Console.WriteLine("...bayes_estimation2 Start!!!...");
    Class_p pre1 = new Class_p();
    MWArray max_m1 = (MWNumericArray)pre1.pre_process();
    f_method fsolve_r1 = new f_method();
    f_method1 fmincon_r = new f_method1();
    Classi integratef = new Classi();
    Classi1 integratef1 = new Classi1();
    MWCharArray str2 = "theta";
    MWCharArray str2f = "n";
    string str1tt = "(theta^" + 0.ToString() + "/factorial(" + 0.ToString() + ")*exp(1)^(-theta))", str1t;
    Console.WriteLine("please assign the number of points you want to deal with:");
    int i, j, data_range = int.Parse(Console.ReadLine());
    for (i = 0; i <= data_range; i++)
    {
        str1tt = str1tt + "*" + "(theta^" + i.ToString() + "/factorial(" + i.ToString() + ")*exp(1)^(-theta))";
    }
    string str1ttf = "(theta^n/factorial(n)*exp(1)^(-theta))" + "*" + str1tt;
    double[,] max_m2 = (double[,])max_m1.ToArray();
    var max_m3 = DenseMatrix.OfArray(max_m2);
    int s1 = max_m2.GetLength(0), s2 = max_m2.GetLength(1);
    var result_p = new DenseMatrix((int)Math.Ceiling((double)(s1 / data_range)), s2);
    var result_p1 = new DenseVector(s2);
    double[] std_r;
    long stop_Value = 0, start_Value = 0, freq = 0;
    QueryPerformanceFrequency(ref freq);      // get the CPU frequency
    QueryPerformanceCounter(ref start_Value); // get the starting counter value
    var count1f = new DenseMatrix((int)Math.Ceiling((double)(s1 / data_range)), s2);
    double[] count1t = new double[s2];
    MathNet.Numerics.LinearAlgebra.Matrix<double> tmp_m1;
    MWArray[] argsOut = new MWArray[3];
    MWArray[] argsOut1 = new MWArray[2];
    MWArray[] argsIn = new MWArray[6];
    MWArray[] argsIn1 = new MWArray[5];
    MWArray[] argsIn1f = new MWArray[4];
    MWArray[] argsOutf1 = new MWArray[1];
    MWArray[] argsIn1f1 = new MWArray[7];
    MWArray[] argsOutf2 = new MWArray[1];
    int exitflag = 0;
    int allmins = 0;
    for (i = 0; i < s2; i++)
    {
        //s2 / data_range???
        for (j = 0; j < (int)Math.Ceiling((double)(s1 / data_range)); j++)
        {
            int count1 = 0;
            if ((j + 1) * data_range - 1 > max_m3.RowCount)
            {
                tmp_m1 = max_m3.SubMatrix(j * data_range, data_range - ((j + 1) * data_range - max_m3.ColumnCount), i, 1);
            }
            else
            {
                tmp_m1 = max_m3.SubMatrix(j * data_range, data_range, i, 1);
            }
            for (int ii = 0; ii < tmp_m1.RowCount; ii++)
            {
                for (int jj = 0; jj < tmp_m1.ColumnCount; jj++)
                {
                    if (Math.Abs(tmp_m1[ii, jj]) > 0.9 * max_m3.SubMatrix(0, max_m3.RowCount, i, 1).Enumerate().Max())
                    {
                        count1++;
                    }
                }
            }
            count1f[j, i] = count1;
            count1t[i] = count1t[i] + count1;
        }
        count1t[i] = count1t[i] / Math.Ceiling((double)(s1 / data_range));
    }
    for (i = 0; i < s2; i++)
    {
        //s2 / data_range???
        for (j = 0; j < (int)Math.Ceiling((double)(s1 / data_range)); j++)
        {
            str1t = "theta^" + count1f[j, i].ToString() + "/factorial(" + ((int)Math.Round(count1f[j, i])).ToString() + ")*exp(1)^(-theta)-" + (count1f[j, i] / data_range).ToString();
            MWCharArray str1 = new MWCharArray(str1t);
            argsIn[0] = str1;
            argsIn[1] = str2;
            argsIn[2] = 1;
            argsIn[5] = 100;
            argsOut[0] = result_p[j, i];
            argsOut[1] = exitflag;
            argsOut[2] = allmins;
            if (count1f[j, i] <= count1t[i])
            {
                argsIn[3] = 0;
                argsIn[4] = count1t[i];
                fsolve_r1.fsolve_r1(3, ref argsOut, argsIn);
                result_p[j, i] = ((MWNumericArray)argsOut[0]).ToScalarDouble();
            }
            else
            {
                argsIn[3] = count1t[i];
                argsIn[4] = data_range;
                fsolve_r1.fsolve_r1(3, ref argsOut, argsIn);
                result_p[j, i] = ((MWNumericArray)argsOut[0]).ToScalarDouble();
            }
        }
    }
    std_r = (double[])Std_t1(result_p);
    for (i = 0; i < s2; i++)
    {
        string str3 = "((1/(sqrt(2*pi)*" + std_r[i].ToString() + "))*exp(((theta-" + count1t[i].ToString() + ")^2)/" + "(-2*" + std_r[i].ToString() + "^2)))";
        str1tt = str1tt + "*" + str3;
        argsIn1f[0] = str1tt;
        argsIn1f[1] = str2;
        argsIn1f[2] = 0;
        argsIn1f[3] = data_range;
        integratef.integrate(1, ref argsOutf1, argsIn1f);
        result_p1[i] = ((MWNumericArray)argsOutf1[0]).ToScalarDouble();
        str1tt = str1tt + "/" + result_p1[i].ToString();
        str1tt = str1tt + "*" + str1ttf + "*" + str2f;
        argsIn1f1[0] = str1tt;
        argsIn1f1[1] = str2;
        argsIn1f1[2] = str2f;
        argsIn1f1[3] = 0;
        argsIn1f1[4] = data_range;
        argsIn1f1[5] = 0;
        argsIn1f1[6] = data_range;
        integratef1.integrate1(1, ref argsOutf2, argsIn1f1);
        result_p1[i] = ((MWNumericArray)argsOutf2[0]).ToScalarDouble();
    }
    //for (i = 0; i < s2; i++)
    //{
    //    MWCharArray str1tt1;
    //    //s2 / data_range???
    //    for (j = 0; j < (int)Math.Ceiling((double)(s1 / data_range)); j++)
    //    {
    //        string str3 = "((1/(sqrt(2*pi)*" + std_r[i].ToString() + "))*exp(((theta-" + count1t[i].ToString() + ")^2)/" + "(-2*" + std_r[i].ToString() + "^2)))";
    //        str1tt = str1tt + "*" + str3;
    //        str1tt1 = new MWCharArray(str1tt);
    //        argsIn1[0] = str1tt1;
    //        argsIn1[1] = str2;
    //        argsIn1[2] = result_p[j, i];
    //        argsIn1[3] = 0;
    //        argsIn1[4] = data_range;
    //        argsOut1[0] = result_p[j, i];
    //        argsOut1[1] = exitflag;
    //        fmincon_r.fmincon_r(2, ref argsOut1, argsIn1);
    //        result_p[j, i] = Result(((MWNumericArray)argsOut1[0]).ToScalarDouble());
    //    }
    //}
    MatlabWriter.Write("result_p1.mat", result_p, "result_p1");
    QueryPerformanceCounter(ref stop_Value); // get the final counter value
    var times2 = (stop_Value - start_Value) / (double)freq * 1000;
    Console.WriteLine("RunTime " + times2 / 1000 + "s");
}
public void RecordInferNETTime()
{
    /** Records time taken by infer.net */
    /**
     * Only one W just like in Ali's paper.
     * In practice, we typically observe multiple sets of observations
     * where we want to do inference on the same model with the same
     * parameter.
     */
    Rand.Restart(init_fixed_seed);
    Vector w = Vector.Zero(d);
    Rand.Normal(Vector.Zero(d), PositiveDefiniteMatrix.Identity(d), w);

    // Create the Logistic operator instance only once because we want to use the same
    // one after a new problem (new seed).
    // stopwatch for measuring inference time for each problem
    Stopwatch watch = new Stopwatch();
    Type logisticOp = typeof(LogisticOp2);
    LogisticOp2.Watch = watch;
    List<long> allInferTimes = new List<long>();
    var allPosteriors = new List<VectorGaussian>();
    LogisticOp2.IsCollectLogisticMessages = false;
    LogisticOp2.IsCollectProjMsgs = false;
    LogisticOp2.IsCollectXMessages = false;
    for (int seed = seed_from; seed <= seed_to; seed++)
    {
        Rand.Restart(seed);
        double b = 0;
        // combine the bias term into W
        Vector[] X;
        bool[] Y;
        LogisticRegression.GenData(n, w, b, out X, out Y, seed);
        Console.Write("Y: ");
        StringUtils.PrintArray(Y);
        VectorGaussian wPost;
        // start the watch
        watch.Restart();
        LogisticRegression.InferCoefficientsNoBias(X, Y, out wPost, epIter, logisticOp);
        // stop the watch
        long inferenceTime = watch.ElapsedMilliseconds;
        allInferTimes.Add(inferenceTime);
        allPosteriors.Add(wPost);
        // print
        Console.WriteLine("n: {0}", n);
        Console.WriteLine("d: {0}", d);
        int t = Y.Sum(o => o ? 1 : 0);
        Console.WriteLine("number of true: {0}", t);
        Console.WriteLine("True bias: {0}", b);
        //Vector meanW = wPost.GetMean();
        Console.WriteLine("True w: {0}", w);
        Console.WriteLine("Inferred w: ");
        Console.WriteLine(wPost);
    }
    string fnameM = string.Format("rec_dnet_n{0}_logistic_iter{1}_sf{2}_st{3}.mat", n, epIter, seed_from, seed_to);
    string recordPathM = Config.PathToSavedFile(fnameM);
    MatlabWriter writer = new MatlabWriter(recordPathM);
    writer.Write("allInferTimes", MatrixUtils.ToDouble(allInferTimes));
    Vector[] postMeans = allPosteriors.Select(vg => vg.GetMean()).ToArray();
    Matrix[] postCovs = allPosteriors.Select(vg => vg.GetVariance()).ToArray();
    writer.Write("postMeans", postMeans);
    writer.Write("postCovs", postCovs);
    writer.Write("dim", d);
    writer.Write("n", n);
    writer.Write("epIter", epIter);
    writer.Write("seed_from", seed_from);
    writer.Write("seed_to", seed_to);
    writer.Write("init_fixed_seed", init_fixed_seed);
    writer.Dispose();
}
static void Main(string[] args)
{
    #region Save as .txt
    //decimal[] myValue = new decimal[] { 1.111111888m, 2.111111888m, 3.111111888m, 4.111111888m };
    //double[] myValueDb = new double[myValue.Length];
    //for (int i = 0; i < myValue.Length; i++)
    //{
    //    myValueDb[i] = (double)decimal.Round(myValue[i], 6);
    //}
    //string[] mydateTime = new string[] { DateTime.Now.ToString("F"), DateTime.Now.ToString("F"), DateTime.Now.ToString("F"), DateTime.Now.ToString("F") };
    //string fileName = @"C:\Users\asus\Desktop\1.txt";
    //using (FileStream fs = File.Open(fileName, FileMode.Create))
    //{
    //    StreamWriter sw = new StreamWriter(fs);
    //    sw.Write("No.|");
    //    sw.Write(" Time |");
    //    sw.WriteLine(" Value ");
    //    sw.WriteLine();
    //    for (int i = 0; i < myValue.Length; i++)
    //    {
    //        sw.Write(" " + (i + 1) + " |" + mydateTime[i].ToString() + " |");
    //        sw.WriteLine(" " + myValueDb[i].ToString());
    //    }
    //    sw.Flush();
    //    sw.Close();
    //}
    #endregion

    #region Save as .csv
    //string[] mydateTime = new string[] { DateTime.Now.ToString("F"), DateTime.Now.ToString("F"), DateTime.Now.ToString("F"), DateTime.Now.ToString("F") };
    //decimal[] myValue = new decimal[] { 1.1111118m, 2.1111118m, 3.1111118m, 4.1111118m };
    //double[] myValueDb = new double[myValue.Length];
    //for (int i = 0; i < myValue.Length; i++)
    //{
    //    myValueDb[i] = (double)decimal.Round(myValue[i], 6);
    //}
    //string fileName = @"C:\Users\asus\Desktop\1.csv";
    //StreamWriter sw = new StreamWriter(fileName, false, Encoding.UTF8);
    //sw.Write("No." + "," + "Time" + "," + "Value");
    //sw.Write(Environment.NewLine);
    //for (int i = 0; i < myValue.Length; i++)
    //{
    //    sw.Write((i + 1) + "," + mydateTime[i] + "," + myValueDb[i]);
    //    sw.Write(Environment.NewLine);
    //}
    //sw.Flush();
    //sw.Close();
    #endregion

    #region Save as .mat
    FileStream fs = File.Create(@"C:\Users\asus\Desktop\collection2.mat");
    fs.Close();
    double[] X = new double[] { 1.1111118f, 2.1111118f, 3.1111118f, 4.1111118f, 5.1111118f };
    double[] Y = new double[] { 9.1111118f, 9.1111118f, 9.1111118f, 9.1111118f, 10.1111118f };
    double[,] X_Y = new double[5, 2];
    for (int i = 0; i < X.Length; i++)
    {
        X_Y[i, 0] = Math.Round(X[i], 7);
        X_Y[i, 1] = Math.Round(Y[i], 7);
    }
    var mb = Matrix<double>.Build;
    var myMatrix = mb.Dense(X.Length, 2, (i, j) => X_Y[i, j]);
    MatlabWriter.Write<double>(@"C:\Users\asus\Desktop\collection2.mat", myMatrix, "a");
    Console.WriteLine(myMatrix.ToString("0.000000"));
    Console.ReadKey();
    #endregion
}
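// A read-back sketch for the file written above, using MatlabReader.ReadAll from the same
// MathNet.Numerics.Data.Matlab package; "a" matches the name passed to MatlabWriter.Write:
var all = MatlabReader.ReadAll<double>(@"C:\Users\asus\Desktop\collection2.mat");
Console.WriteLine(all["a"].ToString("0.000000"));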
internal void StudentIsPositiveTest2()
{
    GaussianOp.ForceProper = false;
    double shape = 1;
    double mean = -1;
    Gamma precPrior = Gamma.FromShapeAndRate(shape, shape);
    Gaussian meanPrior = Gaussian.PointMass(mean);
    double evExpected;
    Gaussian xExpected = StudentIsPositiveExact(mean, precPrior, out evExpected);
    // the energy has a stationary point here (min in both dimensions), even though xF0 is improper
    Gaussian xF2 = Gaussian.FromMeanAndVariance(-1, 1);
    Gaussian xB0 = new Gaussian(2, 1);
    xF2 = Gaussian.FromMeanAndVariance(-4.552, 6.484);
    //xB0 = new Gaussian(1.832, 0.9502);
    //xB0 = new Gaussian(1.792, 1.558);
    //xB0 = new Gaussian(1.71, 1.558);
    //xB0 = new Gaussian(1.792, 1.5);
    Gaussian xF0 = GaussianOp_Slow.SampleAverageConditional(xB0, meanPrior, precPrior);
    //Console.WriteLine("xB0 = {0} xF0 = {1}", xB0, xF0);
    //Console.WriteLine(xF0*xB0);
    //Console.WriteLine(xF2*xB0);
    xF2 = new Gaussian(0.8651, 1.173);
    xB0 = new Gaussian(-4, 2);
    xB0 = new Gaussian(7, 7);
    if (false)
    {
        xF2 = new Gaussian(mean, 1);
        double[] xs = EpTests.linspace(0, 100, 1000);
        double[] logTrue = Util.ArrayInit(xs.Length, i => GaussianOp.LogAverageFactor(xs[i], mean, precPrior));
        Normalize(logTrue);
        xF2 = FindxF4(xs, logTrue, xF2);
        xF2 = Gaussian.FromNatural(-0.85, 0);
        xB0 = IsPositiveOp.XAverageConditional(true, xF2);
        Console.WriteLine("xF = {0} xB = {1}", xF2, xB0);
        Console.WriteLine("x = {0} should be {1}", xF2 * xB0, xExpected);
        Console.WriteLine("proj[T*xB] = {0}", GaussianOp_Slow.SampleAverageConditional(xB0, meanPrior, precPrior) * xB0);
        double ev = System.Math.Exp(IsPositiveOp.LogAverageFactor(true, xF2) + GaussianOp_Slow.LogAverageFactor(xB0, meanPrior, precPrior) - xF2.GetLogAverageOf(xB0));
        Console.WriteLine("evidence = {0} should be {1}", ev, evExpected);
        return;
    }
    if (false)
    {
        xF2 = new Gaussian(mean, 1);
        xF2 = FindxF3(xExpected, evExpected, meanPrior, precPrior, xF2);
        xB0 = IsPositiveOp.XAverageConditional(true, xF2);
        Console.WriteLine("xF = {0} xB = {1}", xF2, xB0);
        Console.WriteLine("x = {0} should be {1}", xF2 * xB0, xExpected);
        //double ev = Math.Exp(IsPositiveOp.LogAverageFactor(true, xF2) + GaussianOp.LogAverageFactor_slow(xB0, meanPrior, precPrior) - xF2.GetLogAverageOf(xB0));
        //Console.WriteLine("evidence = {0} should be {1}", ev, evExpected);
        return;
    }
    if (false)
    {
        xF2 = new Gaussian(-2, 10);
        xF2 = FindxF2(meanPrior, precPrior, xF2);
        xB0 = IsPositiveOp.XAverageConditional(true, xF2);
        xF0 = GaussianOp_Slow.SampleAverageConditional(xB0, meanPrior, precPrior);
        Console.WriteLine("xB = {0}", xB0);
        Console.WriteLine("xF = {0} should be {1}", xF0, xF2);
        return;
    }
    if (false)
    {
        xF2 = new Gaussian(-3998, 4000);
        xF2 = new Gaussian(0.8651, 1.173);
        xB0 = new Gaussian(-4, 2);
        xB0 = new Gaussian(2000, 1e-5);
        xB0 = FindxB(xB0, meanPrior, precPrior, xF2);
        xF0 = GaussianOp_Slow.SampleAverageConditional(xB0, meanPrior, precPrior);
        Console.WriteLine("xB = {0}", xB0);
        Console.WriteLine("xF = {0} should be {1}", xF0, xF2);
        return;
    }
    if (false)
    {
        //xF2 = new Gaussian(-7, 10);
        //xF2 = new Gaussian(-50, 52);
        xB0 = new Gaussian(-1.966, 5.506e-08);
        //xF2 = new Gaussian(-3998, 4000);
        xF0 = FindxF(xB0, meanPrior, precPrior, xF2);
        Gaussian xB2 = IsPositiveOp.XAverageConditional(true, xF0);
        Console.WriteLine("xF = {0}", xF0);
        Console.WriteLine("xB = {0} should be {1}", xB2, xB0);
        return;
    }
    if (true)
    {
        xF0 = new Gaussian(-3.397e+08, 5.64e+08);
        xF0 = new Gaussian(-2.373e+04, 2.8e+04);
        xB0 = new Gaussian(2.359, 1.392);
        xF0 = Gaussian.FromNatural(-0.84, 0);
        //xF0 = Gaussian.FromNatural(-0.7, 0);
        for (int iter = 0; iter < 10; iter++)
        {
            xB0 = FindxB(xB0, meanPrior, precPrior, xF0);
            Gaussian xFt = GaussianOp_Slow.SampleAverageConditional(xB0, meanPrior, precPrior);
            Console.WriteLine("xB = {0}", xB0);
            Console.WriteLine("xF = {0} should be {1}", xFt, xF0);
            xF0 = FindxF0(xB0, meanPrior, precPrior, xF0);
            Gaussian xBt = IsPositiveOp.XAverageConditional(true, xF0);
            Console.WriteLine("xF = {0}", xF0);
            Console.WriteLine("xB = {0} should be {1}", xBt, xB0);
        }
        Console.WriteLine("x = {0} should be {1}", xF0 * xB0, xExpected);
        double ev = System.Math.Exp(IsPositiveOp.LogAverageFactor(true, xF0) + GaussianOp_Slow.LogAverageFactor(xB0, meanPrior, precPrior) - xF0.GetLogAverageOf(xB0));
        Console.WriteLine("evidence = {0} should be {1}", ev, evExpected);
        return;
    }
    //var precs = EpTests.linspace(1e-6, 1e-5, 200);
    var precs = EpTests.linspace(xB0.Precision / 11, xB0.Precision, 100);
    //var precs = EpTests.linspace(xF0.Precision/20, xF0.Precision/3, 100);
    precs = EpTests.linspace(1e-9, 1e-5, 100);
    //precs = new double[] { xB0.Precision };
    var ms = EpTests.linspace(xB0.GetMean() - 1, xB0.GetMean() + 1, 100);
    //var ms = EpTests.linspace(xF0.GetMean()-1, xF0.GetMean()+1, 100);
    //precs = EpTests.linspace(1.0/10, 1.0/8, 200);
    ms = EpTests.linspace(2000, 4000, 100);
    //ms = new double[] { xB0.GetMean() };
    Matrix result = new Matrix(precs.Length, ms.Length);
    Matrix result2 = new Matrix(precs.Length, ms.Length);
    //ms = new double[] { 0.7 };
    for (int j = 0; j < ms.Length; j++)
    {
        double maxZ = double.NegativeInfinity;
        double minZ = double.PositiveInfinity;
        Gaussian maxxF = Gaussian.Uniform();
        Gaussian minxF = Gaussian.Uniform();
        Gaussian maxxB = Gaussian.Uniform();
        Gaussian minxB = Gaussian.Uniform();
        Vector v = Vector.Zero(3);
        for (int i = 0; i < precs.Length; i++)
        {
            Gaussian xF = Gaussian.FromMeanAndPrecision(ms[j], precs[i]);
            xF = xF2;
            Gaussian xB = IsPositiveOp.XAverageConditional(true, xF);
            xB = Gaussian.FromMeanAndPrecision(ms[j], precs[i]);
            //xB = xB0;
            v[0] = IsPositiveOp.LogAverageFactor(true, xF);
            v[1] = GaussianOp.LogAverageFactor_slow(xB, meanPrior, precPrior);
            //v[1] = GaussianOp_Slow.LogAverageFactor(xB, meanPrior, precPrior);
            v[2] = -xF.GetLogAverageOf(xB);
            double logZ = v.Sum();
            double Z = logZ;
            if (Z > maxZ)
            {
                maxZ = Z;
                maxxF = xF;
                maxxB = xB;
            }
            if (Z < minZ)
            {
                minZ = Z;
                minxF = xF;
                minxB = xB;
            }
            result[i, j] = Z;
            result2[i, j] = IsPositiveOp.LogAverageFactor(true, xF) + xF0.GetLogAverageOf(xB) - xF.GetLogAverageOf(xB);
            //Gaussian xF3 = GaussianOp.SampleAverageConditional_slower(xB, meanPrior, precPrior);
            //result[i, j] = Math.Pow(xF3.Precision - xF.Precision, 2);
            //result2[i, j] = Math.Pow((xF2*xB).Precision - (xF*xB).Precision, 2);
            //result2[i, j] = -xF.GetLogAverageOf(xB);
            //Gaussian xF2 = GaussianOp.SampleAverageConditional_slow(xB, Gaussian.PointMass(0), precPrior);
            Gaussian xMarginal = xF * xB;
            //Console.WriteLine("xF = {0} Z = {1} x = {2}", xF, Z.ToString("g4"), xMarginal);
        }
        double delta = v[1] - v[2];
        //Console.WriteLine("xF = {0} xB = {1} maxZ = {2} x = {3}", maxxF, maxxB, maxZ.ToString("g4"), maxxF*maxxB);
        //Console.WriteLine("xF = {0} maxZ = {1} delta = {2}", maxxF, maxZ.ToString("g4"), delta.ToString("g4"));
        Console.WriteLine("xF = {0} xB = {1} minZ = {2} x = {3}", minxF, minxB, minZ.ToString("g4"), minxF * minxB);
    }
    //TODO: change path for cross platform using
    using (var writer = new MatlabWriter(@"..\..\..\Tests\student.mat"))
    {
        writer.Write("z", result);
        writer.Write("z2", result2);
        writer.Write("precs", precs);
        writer.Write("ms", ms);
    }
}