static void Main(string[] args)
{
    string filename = @"..\..\mag.txt";
    int i;

    // load measurement data
    Matrix<double> data = DelimitedReader.Read<double>(filename, false, "\t", false);

    // create D (10 x numdata)
    Matrix<double> D = Matrix<double>.Build.Dense(10, data.RowCount);
    for (i = 0; i < data.RowCount; i++)
    {
        D[0, i] = data[i, 0] * data[i, 0];
        D[1, i] = data[i, 1] * data[i, 1];
        D[2, i] = data[i, 2] * data[i, 2];
        D[3, i] = 2.0 * data[i, 1] * data[i, 2];
        D[4, i] = 2.0 * data[i, 0] * data[i, 2];
        D[5, i] = 2.0 * data[i, 0] * data[i, 1];
        D[6, i] = 2.0 * data[i, 0];
        D[7, i] = 2.0 * data[i, 1];
        D[8, i] = 2.0 * data[i, 2];
        D[9, i] = 1.0;
    }

    // create S (10 x 10)
    Matrix<double> S = D * D.Transpose();

    // create sub matrices of S
    //
    //        -                        -
    //   S = | S11  (6x6)   S12  (6x4)  |
    //       | S12t (4x6)   S22  (4x4)  |
    //        -                        -

    // create S11 (6x6)
    Matrix<double> S11 = Matrix<double>.Build.Dense(6, 6);
    S11.SetSubMatrix(0, 0, 6, 0, 0, 6, S);

    // create S12 (6x4)
    Matrix<double> S12 = Matrix<double>.Build.Dense(6, 4);
    S12.SetSubMatrix(0, 0, 6, 0, 6, 4, S);

    // create S12t (4x6)
    Matrix<double> S12t = Matrix<double>.Build.Dense(4, 6);
    S12t.SetSubMatrix(0, 6, 4, 0, 0, 6, S);

    // create S22 (4x4)
    Matrix<double> S22 = Matrix<double>.Build.Dense(4, 4);
    S22.SetSubMatrix(0, 6, 4, 0, 6, 4, S);

    // calculate the pseudo inverse of S22
    Matrix<double> S22_1 = S22.PseudoInverse();

    // calculate SS = S11 - S12 * S22_1 * S12t
    Matrix<double> SS = S11 - S12 * S22_1 * S12t;

    // create the constraint matrix C
    Matrix<double> Co = Matrix<double>.Build.DenseOfArray(new double[,]
    {
        { -1.0,  1.0,  1.0,  0.0,  0.0,  0.0 },
        {  1.0, -1.0,  1.0,  0.0,  0.0,  0.0 },
        {  1.0,  1.0, -1.0,  0.0,  0.0,  0.0 },
        {  0.0,  0.0,  0.0, -4.0,  0.0,  0.0 },
        {  0.0,  0.0,  0.0,  0.0, -4.0,  0.0 },
        {  0.0,  0.0,  0.0,  0.0,  0.0, -4.0 }
    });
    Matrix<double> C = Co.Inverse();

    // calculate E = C * SS
    Matrix<double> E = C * SS;

    // calculate the eigenvalues wr (6x1) and eigenvectors vr (6x6) of matrix E
    Evd<double> eigen = E.Evd();
    Vector<Complex> wr = eigen.EigenValues;
    Matrix<double> vr = eigen.EigenVectors;

    // find the zero-based position of the only positive eigenvalue;
    // the associated eigenvector is in the corresponding column of matrix vr
    int index = 0;
    double maxval = wr[0].Real;
    for (i = 1; i < 6; i++)
    {
        if (wr[i].Real > maxval)
        {
            maxval = wr[i].Real;
            index = i;
        }
    }

    // extract the associated eigenvector v1
    Vector<double> v1 = vr.Column(index);

    // check the sign of eigenvector v1
    if (v1[0] < 0.0)
    {
        v1[0] = -v1[0];
        v1[1] = -v1[1];
        v1[2] = -v1[2];
        v1[3] = -v1[3];
        v1[4] = -v1[4];
        v1[5] = -v1[5];
    }

    // calculate v2 = (S22_1 * S12t) * v1
    Vector<double> v2 = (S22_1 * S12t) * v1;

    // calculate v
    Vector<double> v = Vector<double>.Build.Dense(10);
    v[0] = v1[0];
    v[1] = v1[1];
    v[2] = v1[2];
    v[3] = v1[3];
    v[4] = v1[4];
    v[5] = v1[5];
    v[6] = -v2[0];
    v[7] = -v2[1];
    v[8] = -v2[2];
    v[9] = -v2[3];

    // At this point we have found the general equation of the fitted ellipsoid:
    //
    //   Ax² + By² + Cz² + 2Dxy + 2Exz + 2Fyz + 2Gx + 2Hy + 2Iz + J = 0
    //
    // where:
    //   A = v[0] - term in x²
    //   B = v[1] - term in y²
    //   C = v[2] - term in z²
    //   D = v[5] - term in xy
    //   E = v[4] - term in xz
    //   F = v[3] - term in yz
    //   G = v[6] - term in x
    //   H = v[7] - term in y
    //   I = v[8] - term in z
    //   J = v[9] - constant term
    //
    // If we define
    //
    //        | A D E |         | G |
    //    Q = | D B F |     U = | H |
    //        | E F C |         | I |
    //
    // then the center of the ellipsoid can be calculated as the vector B = -Q⁻¹ * U.
    // The center of the ellipsoid represents the combined bias.

    Matrix<double> Q = Matrix<double>.Build.Dense(3, 3);
    Q[0, 0] = v[0]; // A
    Q[0, 1] = v[5]; // D
    Q[0, 2] = v[4]; // E
    Q[1, 0] = v[5]; // D
    Q[1, 1] = v[1]; // B
    Q[1, 2] = v[3]; // F
    Q[2, 0] = v[4]; // E
    Q[2, 1] = v[3]; // F
    Q[2, 2] = v[2]; // C

    Vector<double> U = Vector<double>.Build.Dense(3);
    U[0] = v[6]; // G
    U[1] = v[7]; // H
    U[2] = v[8]; // I

    // calculate matrix Q_1, the inverse of matrix Q
    Matrix<double> Q_1 = Q.Inverse();

    // calculate B = Q_1 * U  (3x1 = 3x3 * 3x1)
    Vector<double> B = Q_1 * U;

    // calculate the combined bias
    B[0] = -B[0]; // x-axis combined bias
    B[1] = -B[1]; // y-axis combined bias
    B[2] = -B[2]; // z-axis combined bias
    Console.WriteLine("Combined bias:");
    Console.WriteLine(B.ToString());

    // calculate A⁻¹ = (Hm / sqrt(Bᵗ * Q * B - J)) * Q^(1/2)

    // calculate btqb = Bᵗ * Q * B
    double btqb = B * Q * B;

    // calculate hmb = sqrt(btqb - J)
    double J = v[9];
    double hmb = Math.Sqrt(btqb - J);

    // calculate SQ, the square root of matrix Q
    eigen = Q.Evd();
    wr = eigen.EigenValues;
    vr = eigen.EigenVectors;

    // normalize the eigenvectors
    double norm1 = Math.Sqrt(vr[0, 0] * vr[0, 0] + vr[0, 1] * vr[0, 1] + vr[0, 2] * vr[0, 2]);
    vr[0, 0] /= norm1;
    vr[0, 1] /= norm1;
    vr[0, 2] /= norm1;
    double norm2 = Math.Sqrt(vr[1, 0] * vr[1, 0] + vr[1, 1] * vr[1, 1] + vr[1, 2] * vr[1, 2]);
    vr[1, 0] /= norm2;
    vr[1, 1] /= norm2;
    vr[1, 2] /= norm2;
    double norm3 = Math.Sqrt(vr[2, 0] * vr[2, 0] + vr[2, 1] * vr[2, 1] + vr[2, 2] * vr[2, 2]);
    vr[2, 0] /= norm3;
    vr[2, 1] /= norm3;
    vr[2, 2] /= norm3;

    Matrix<double> Dz = Matrix<double>.Build.Dense(3, 3);
    Dz[0, 0] = Math.Sqrt(wr[0].Real);
    Dz[1, 1] = Math.Sqrt(wr[1].Real);
    Dz[2, 2] = Math.Sqrt(wr[2].Real);
    Matrix<double> SQ = (vr * Dz) * vr.Transpose();

    // Hm, the local geomagnetic field strength
    double hm = 0.569;
    Matrix<double> A_1 = SQ * hm / hmb;
    Console.WriteLine("A-1 matrix:");
    Console.WriteLine(A_1.ToString());

    // calculate A to permit comparison with MagCal
    Matrix<double> A = A_1.Inverse();
    Console.WriteLine("A matrix:");
    Console.WriteLine(A.ToString());
}
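A minimal sketch of how the results above could be applied, assuming the standard hard/soft-iron correction model in which B is the combined bias and A_1 is the correction matrix printed by the routine; the helper name Correct is hypothetical and not part of the original code.

// Hypothetical helper: applies the computed calibration to one raw
// magnetometer sample as corrected = A_1 * (raw - B).
static Vector<double> Correct(Matrix<double> A_1, Vector<double> B, Vector<double> raw)
{
    return A_1 * (raw - B);
}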
static void Main(string[] args)
{
    // Initializations
    double alpha = .01;     // alpha values to try: .001, .003, .01, .03, .1, .3, 1
    double Lambda = 10;
    int iterations = 1500;
    string ThetaFile = "Thetasave.csv";

    if (args.Length < 2)
    {
        Console.WriteLine(mystrings.usage);
        System.Environment.Exit(-1);
    }
    if (!File.Exists(args[0]))
    {
        Console.WriteLine("Training file {0} not found!", args[0]);
        System.Environment.Exit(-1);
    }
    if (!File.Exists(args[1]))
    {
        Console.WriteLine("Label file {0} not found!", args[1]);
        System.Environment.Exit(-1);
    }
    if (args.Length > 2)
    {
        alpha = Convert.ToDouble(args[2]);
    }
    else
    {
        Console.WriteLine("Using default alpha {0}", alpha);
    }

    string trainingfile = args[0];
    string labelfile = args[1];
    // maybe add a check for csv format??

    /*
     * DelimitedReader parameter description (only Single, Double, Complex and Complex32):
     *   second param: sparse (true) or dense (false) matrix
     *   third param:  delimiter
     *   fourth param: has headers (true/false)
     */
    Matrix<double> input = DelimitedReader.Read<double>(trainingfile, false, ",", false);
    Matrix<double> labels = DelimitedReader.Read<double>(labelfile, false, ",", false);

    if (labels.RowCount != input.RowCount)
    {
        Console.WriteLine(mystrings.SamplesDontMatch, labels.RowCount, input.RowCount);
    }

    int rows = input.RowCount;
    int cols = input.ColumnCount;
    Console.WriteLine("Training set rows = {0}, columns = {1}", rows, cols);
    Console.WriteLine("Label set rows = {0}, columns = {1}", labels.RowCount, labels.ColumnCount);

    // Initial guess for theta is an n x 1 vector of zeros, where n is the number of features (columns)
    Matrix<double> init_theta = Matrix<double>.Build.Dense(cols, 1);
    Matrix<double> ones_theta = Matrix<double>.Build.Dense(cols, 1, 1);

    Console.WriteLine(mystrings.running, iterations);

    /* Investigate using the ILNumerics unconstrained optimization code, otherwise a
     * gradient descent routine will need to be written.
     * https://ilnumerics.net/unconstrained-optimization.html
     */
    Matrix<double> theta = Functions.utilityfunctions.GradientDescent(input, labels, init_theta, alpha, iterations, Lambda);

    /* StreamWriter checkthis;
     * try
     * {
     *     checkthis = new StreamWriter("LearnedTheta.csv");
     *     checkthis.Close();
     * }
     * catch (IOException)
     * {
     *     Console.WriteLine("Oopsie");
     * }
     */
    if (!utilityfunctions.FileOpen(ThetaFile))
    {
        Console.WriteLine(mystrings.File_in_use, ThetaFile);
        System.Environment.Exit(-1);
    }
    else
    {
        DelimitedWriter.Write(ThetaFile, theta, ",");
    }
}
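The routine Functions.utilityfunctions.GradientDescent is not shown in this example. Below is a minimal sketch of what a batch gradient-descent routine with the same signature might look like, assuming regularized linear regression; the project's actual implementation may differ.

// Sketch only: batch gradient descent for regularized least squares,
// matching the (X, y, theta, alpha, iterations, lambda) call above.
static Matrix<double> GradientDescent(Matrix<double> X, Matrix<double> y, Matrix<double> theta,
                                      double alpha, int iterations, double lambda)
{
    int m = X.RowCount;
    for (int iter = 0; iter < iterations; iter++)
    {
        // gradient of the regularized cost: (1/m) * Xᵗ(Xθ - y) + (λ/m) * θ
        Matrix<double> grad = X.TransposeThisAndMultiply(X * theta - y) / m + theta * (lambda / m);
        grad[0, 0] -= theta[0, 0] * (lambda / m);   // do not regularize the bias term
        theta = theta - alpha * grad;
    }
    return theta;
}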
public void CanParseMissingDataCommaDelimitedData()
{
    var data = "a,b,c" + Environment.NewLine
             + ", , , ," + Environment.NewLine
             + "\"2.2\", , ,0.3e1" + Environment.NewLine
             + "'4',0,6" + Environment.NewLine
             + "," + Environment.NewLine
             + ",,3, 4" + Environment.NewLine
             + ",, , ,," + Environment.NewLine
             + ",,,,," + Environment.NewLine
             + ", ,,, ," + Environment.NewLine
             + ", , , ,," + Environment.NewLine;

    var matrix = DelimitedReader.Read<double>(new StringReader(data), false, ",", true, CultureInfo.InvariantCulture);
    Assert.AreEqual(9, matrix.RowCount);
    Assert.AreEqual(5, matrix.ColumnCount);

    Assert.AreEqual(double.NaN, matrix[0, 0]);
    Assert.AreEqual(double.NaN, matrix[0, 1]);
    Assert.AreEqual(double.NaN, matrix[0, 2]);
    Assert.AreEqual(double.NaN, matrix[0, 3]);
    Assert.AreEqual(double.NaN, matrix[0, 4]);

    Assert.AreEqual(2.2, matrix[1, 0]);
    Assert.AreEqual(double.NaN, matrix[1, 1]);
    Assert.AreEqual(double.NaN, matrix[1, 2]);
    Assert.AreEqual(3.0, matrix[1, 3]);
    Assert.AreEqual(double.NaN, matrix[1, 4]);

    Assert.AreEqual(4.0, matrix[2, 0]);
    Assert.AreEqual(0.0, matrix[2, 1]);
    Assert.AreEqual(6.0, matrix[2, 2]);
    Assert.AreEqual(double.NaN, matrix[2, 3]);
    Assert.AreEqual(double.NaN, matrix[2, 4]);

    Assert.AreEqual(double.NaN, matrix[3, 0]);
    Assert.AreEqual(double.NaN, matrix[3, 1]);
    Assert.AreEqual(double.NaN, matrix[3, 2]);
    Assert.AreEqual(double.NaN, matrix[3, 3]);
    Assert.AreEqual(double.NaN, matrix[3, 4]);

    Assert.AreEqual(double.NaN, matrix[4, 0]);
    Assert.AreEqual(double.NaN, matrix[4, 1]);
    Assert.AreEqual(3.0, matrix[4, 2]);
    Assert.AreEqual(4.0, matrix[4, 3]);
    Assert.AreEqual(double.NaN, matrix[4, 4]);

    Assert.AreEqual(double.NaN, matrix[5, 0]);
    Assert.AreEqual(double.NaN, matrix[5, 1]);
    Assert.AreEqual(double.NaN, matrix[5, 2]);
    Assert.AreEqual(double.NaN, matrix[5, 3]);
    Assert.AreEqual(double.NaN, matrix[5, 4]);

    Assert.AreEqual(double.NaN, matrix[6, 0]);
    Assert.AreEqual(double.NaN, matrix[6, 1]);
    Assert.AreEqual(double.NaN, matrix[6, 2]);
    Assert.AreEqual(double.NaN, matrix[6, 3]);
    Assert.AreEqual(double.NaN, matrix[6, 4]);

    Assert.AreEqual(double.NaN, matrix[7, 0]);
    Assert.AreEqual(double.NaN, matrix[7, 1]);
    Assert.AreEqual(double.NaN, matrix[7, 2]);
    Assert.AreEqual(double.NaN, matrix[7, 3]);
    Assert.AreEqual(double.NaN, matrix[7, 4]);

    Assert.AreEqual(double.NaN, matrix[8, 0]);
    Assert.AreEqual(double.NaN, matrix[8, 1]);
    Assert.AreEqual(double.NaN, matrix[8, 2]);
    Assert.AreEqual(double.NaN, matrix[8, 3]);
    Assert.AreEqual(double.NaN, matrix[8, 4]);
}
static void Main(string[] args)
{
    CultureInfo customCulture = (CultureInfo)System.Threading.Thread.CurrentThread.CurrentCulture.Clone();
    customCulture.NumberFormat.NumberDecimalSeparator = ".";
    System.Threading.Thread.CurrentThread.CurrentCulture = customCulture;

    /*
     * Tinker();
     * Console.ReadLine();
     * Environment.Exit(0);
     */
    NetworkModel model = null;
    bool teaching = false;
    int epochRuns = 1;
    int repeats = 1;
    string inputFile = null;

    CommandLine.Parser.Default.ParseArguments<Options>(args)
        .WithParsed<Options>(opts =>
        {
            teaching = opts.Teaching;
            epochRuns = opts.EpochRuns;
            repeats = opts.Repeats;
            Debug = opts.Debug;
#if !DEBUG
            try
            {
#endif
                model = NetworkModel.Load(opts.Model);
                inputFile = model.Path(opts.Input);
#if !DEBUG
                Console.Clear();
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
                Environment.Exit(1);
            }
#endif
        });

    if (model is null)
    {
        Environment.Exit(0);
    }

    if (teaching)
    {
        Console.WriteLine("Launching model in teaching mode");

        // Save a newly generated model.
        if (!model.Loaded)
        {
            model.Save();
        }

        var plotModel = new PlotModel
        {
            Title = String.Format("{0} model \"{1}\" learning graph", model.Config.Type, model.Name)
        };
        var series = new OxyPlot.Series.LineSeries();
        try
        {
            var graphMatrix = DelimitedReader.Read<double>(model.Path("graph"));
            for (int i = 0; i < graphMatrix.RowCount; i++)
            {
                series.Points.Add(new DataPoint(i + 1, graphMatrix.Row(i).At(0)));
            }
        }
        catch { }
        plotModel.Series.Add(series);
        plotModel.Axes.Add(new OxyPlot.Axes.LinearAxis
        {
            Minimum = 1,
            Position = OxyPlot.Axes.AxisPosition.Bottom,
            Title = "Epochs",
        });
        plotModel.Axes.Add(new OxyPlot.Axes.LinearAxis
        {
            Position = OxyPlot.Axes.AxisPosition.Left,
            Title = "Cost",
        });

        var epochOffset = series.Points.Count;
        var epoch = epochOffset;
        var epochRunStep = epochRuns;
        var stopwatch = new Stopwatch();
        var epochStopwatch = new Stopwatch();
        bool forceStop = false;
        stopwatch.Restart();
        var repeat = 1;

        while (epochRuns == 0 || epoch < epochRuns + epochOffset)
        {
            Console.Write("\nepoch {0} / {1}...", ++epoch, epochOffset + epochRuns);
            var previous = model.GetInfo();
            epochStopwatch.Restart();
            stopwatch.Start();
            var cost = model.RunEpoch();
            stopwatch.Stop();
            Console.Write(" {0:f2} + {1:f2}s,", stopwatch.Elapsed.TotalSeconds, epochStopwatch.Elapsed.TotalSeconds);
            Console.Write(" cost {0}", cost);
            series.Points.Add(new DataPoint(epoch, cost));

            if (model.Config.Threshold != null && Math.Abs(cost) <= model.Config.Threshold)
            {
                Console.WriteLine("\nError threshold ({0}) reached. Finished learning.", model.Config.Threshold);
                forceStop = true;
            }

            if (epochRuns == 0 || epoch >= epochRuns + epochOffset || forceStop)
            {
                forceStop = false;
                epochRuns += epochRunStep;
                var pngExporter = new PngExporter();
                pngExporter.ExportToFile(plotModel, model.Path("learning-graph.png"));
                if (repeat++ >= repeats)
                {
                    DisplayActions(model, series);
                }
                else
                {
                    SaveModel(model, series);
                }
            }
        }
    }
    else
    {
        if (model.DataTransformer is Model.Transformers.VectorDataTransformer)
        {
            Console.WriteLine("Running execution loop\n");
            while (true)
            {
                Console.WriteLine("Model info:");
                Console.WriteLine("===========");
                Console.WriteLine(model.GetInfo());
                DisplayActions(model);

                var inputs = Vector<double>.Build.Dense(model.Config.Inputs);
                for (int i = 0; i < model.Config.Inputs; i++)
                {
                    inputs[i] = ReadDouble(String.Format("Input[{0}]", i));
                }

                Console.Clear();
                Console.WriteLine("Inputs:");
                Console.WriteLine("=======");
                Console.WriteLine(inputs.ToVectorString());
                Console.WriteLine("Result:");
                Console.WriteLine("=======");
                foreach (var label in model.Run(inputs))
                {
                    Console.WriteLine(label);
                }
                Console.WriteLine();
            }
        }
        else if (model.DataTransformer is Model.Transformers.MnistDataTransformer)
        {
            if (!File.Exists(inputFile))
            {
                Console.WriteLine("Input file '{0}' doesn't exist", inputFile);
                Environment.Exit(1);
            }
            var inputs = model.InputTransformer.Transform(inputFile).Row(0);
            foreach (var label in model.Run(inputs))
            {
                Console.WriteLine(label);
            }
        }
        else
        {
            Console.WriteLine("Unsupported model input transformer '{0}'.", model.DataTransformer);
            Environment.Exit(1);
        }
    }
}
static void Main(string[] args)
{
    if (!System.Console.IsOutputRedirected)
    {
        System.Console.Clear();
    }

    CultureInfo.CurrentCulture = CultureInfo.CreateSpecificCulture("en-US");

    System.Console.WriteLine("Logistic Regression ex.2");
    System.Console.WriteLine("========================\n");

    var M = Matrix<double>.Build;
    var V = Vector<double>.Build;

    // Load data.
    // The first two columns contain the exam scores and the third column
    // contains the label.
    Matrix<double> data = DelimitedReader.Read<double>("data\\ex2data1.txt", false, ",", false);
    Console.WriteLine(data);

    Matrix<double> X = data.SubMatrix(0, data.RowCount, 0, 2);
    Vector<double> y = data.Column(2);

    System.Console.WriteLine("Features:\n");
    System.Console.WriteLine(X);
    System.Console.WriteLine("Label:\n");
    System.Console.WriteLine(y);

    // ==================== Part 1: Plotting ====================
    // We start the exercise by first plotting the data to understand
    // the problem we are working with.
    System.Console.WriteLine("Plotting data with + indicating (y = 1) examples and o indicating (y = 0) examples.\n");
    PlotData(X, y);
    GnuPlot.HoldOff();
    Pause();

    // theta parameters
    Vector<double> initial_theta = V.Dense(X.ColumnCount + 1);

    // Add intercept term to X
    X = X.InsertColumn(0, V.Dense(X.RowCount, 1));

    // compute cost
    LogisticRegression lr = new LogisticRegression(X, y);
    double J = lr.Cost(initial_theta);
    Vector<double> grad = lr.Gradient(initial_theta);

    System.Console.WriteLine("Cost at initial theta (zeros): {0:f3}\n", J);
    System.Console.WriteLine("Expected cost (approx): 0.693\n");
    System.Console.WriteLine("Gradient at initial theta (zeros): \n");
    System.Console.WriteLine(" {0:f4} \n", grad);
    System.Console.WriteLine("Expected gradients (approx):\n -0.1000\n -12.0092\n -11.2628\n");

    // Compute and display cost and gradient with non-zero theta
    Vector<double> test_theta = V.DenseOfArray(new double[] { -24.0, 0.2, 0.2 });
    J = lr.Cost(test_theta);
    grad = lr.Gradient(test_theta);

    System.Console.WriteLine("\nCost at test theta: {0:f3}\n", J);
    System.Console.WriteLine("Expected cost (approx): 0.218\n");
    System.Console.WriteLine("Gradient at test theta: \n");
    System.Console.WriteLine(" {0:f3} \n", grad);
    System.Console.WriteLine("Expected gradients (approx):\n 0.043\n 2.566\n 2.647\n");
    Pause();

    // ============= Part 3: Optimizing using fmin ================
    // In this exercise, I will use the fmin function to find the
    // optimal parameters theta.
    var obj = ObjectiveFunction.Gradient(lr.Cost, lr.Gradient);
    var solver = new BfgsMinimizer(1e-5, 1e-5, 1e-5, 1000);
    var result = solver.FindMinimum(obj, initial_theta);

    System.Console.WriteLine("Cost at theta found by fmin: {0:f5} after {1} iterations\n", result.FunctionInfoAtMinimum.Value, result.Iterations);
    System.Console.WriteLine("Expected cost (approx): 0.203\n");
    System.Console.WriteLine("theta: \n");
    System.Console.WriteLine(result.MinimizingPoint);
    System.Console.WriteLine("Expected theta (approx):\n");
    System.Console.WriteLine(" -25.161\n 0.206\n 0.201\n");
    Pause();

    PlotLinearBoundary(X, y, result.MinimizingPoint);
    GnuPlot.HoldOff();

    // ============== Part 4: Predict and Accuracies ==============
    // After learning the parameters, you'll want to use them to predict the outcomes
    // on unseen data. In this part, you will use the logistic regression model
    // to predict the probability that a student with score 45 on exam 1 and
    // score 85 on exam 2 will be admitted.
    //
    // Furthermore, you will compute the training and test set accuracies of
    // our model.

    // Predict probability for a student with score 45 on exam 1
    // and score 85 on exam 2
    double prob = LogisticRegression.Predict(V.DenseOfArray(new[] { 1.0, 45.0, 85.0 }), result.MinimizingPoint);
    System.Console.WriteLine("For a student with scores 45 and 85, we predict an admission probability of {0:f5}\n", prob);
    System.Console.WriteLine("Expected value: 0.775 +/- 0.002\n\n");
    Pause();

    // Compute accuracy on our training set
    Vector<double> pos = LogisticRegression.Predict(X, result.MinimizingPoint);
    Func<double, double> map = delegate(double d)
    {
        if (d >= 0.5)
        {
            return 1;
        }
        else
        {
            return 0;
        }
    };
    pos = pos.Map(map);

    Vector<double> comp = V.Dense(y.Count);
    for (int i = 0; i < y.Count; i++)
    {
        if (pos[i] == y[i])
        {
            comp[i] = 1;
        }
        else
        {
            comp[i] = 0;
        }
    }

    double accuracy = comp.Mean() * 100;
    System.Console.WriteLine("Train Accuracy: {0:f5}\n", accuracy);
    System.Console.WriteLine("Expected accuracy (approx): 89.0\n");
    System.Console.WriteLine("\n");
}
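The LogisticRegression.Predict helpers used above are not shown in this example. Below is a minimal sketch of what they might compute, assuming the usual logistic hypothesis (the sigmoid of the linear score); the helper names PredictOne and PredictAll are hypothetical and the project's own class may differ.

// Sketch only: h(x) = 1 / (1 + e^(-thetaᵗ x)) for a single example and for every row of X.
static double PredictOne(Vector<double> x, Vector<double> theta)
{
    return 1.0 / (1.0 + Math.Exp(-x.DotProduct(theta)));
}

static Vector<double> PredictAll(Matrix<double> X, Vector<double> theta)
{
    // apply the sigmoid to every row's score X * theta
    return (X * theta).Map(z => 1.0 / (1.0 + Math.Exp(-z)));
}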
static void Main(string[] args)
{
    if (!System.Console.IsOutputRedirected)
    {
        System.Console.Clear();
    }

    CultureInfo.CurrentCulture = CultureInfo.CreateSpecificCulture("en-US");

    System.Console.WriteLine("Linear Regression ex.1");
    System.Console.WriteLine("======================\n");

    // ==================== Part 1: Basic Function ====================
    var M = Matrix<double>.Build;
    var V = Vector<double>.Build;

    // load data
    Matrix<double> data = DelimitedReader.Read<double>("data\\ex1data1.txt", false, ",", false);
    Console.WriteLine(data);

    Matrix<double> X = data.Column(0).ToColumnMatrix();
    Matrix<double> y = data.Column(1).ToColumnMatrix();
    int m = X.RowCount;

    // ======================= Part 2: Plotting =======================
    System.Console.WriteLine("Plotting data....");
    GnuPlot.HoldOn();
    PlotData(X.Column(0).ToArray(), y.Column(0).ToArray());
    Pause();

    // =================== Part 3: Cost and Gradient descent ===================
    System.Console.WriteLine("Cost and Gradient descent....");

    // Add a column of ones to X
    X = X.InsertColumn(0, V.Dense(m, 1));
    System.Console.WriteLine(X);

    // initialize fitting parameters
    Matrix<double> theta = M.Dense(2, 1);

    double J = ComputeCost(X, y, theta);
    System.Console.WriteLine("With theta = [0 ; 0]\nCost computed = {0:f}\n", J);
    System.Console.WriteLine("Expected cost value (approx) 32.07\n");

    // try other fitting parameters
    theta[0, 0] = -1;
    theta[1, 0] = 2;
    J = ComputeCost(X, y, theta);
    System.Console.WriteLine("With theta = [-1 ; 2]\nCost computed = {0:f}\n", J);
    System.Console.WriteLine("Expected cost value (approx) 54.24\n");

    // run gradient descent
    System.Console.WriteLine("\nRunning Gradient Descent ...\n");
    int iterations = 1500;
    double alpha = 0.01;
    theta = M.Dense(2, 1);

    (Matrix<double> theta, Matrix<double> J_history) res;
    res = GradientDescent(X, y, theta, alpha, iterations);
    theta = res.theta;

    // print theta to screen
    System.Console.WriteLine("Theta found by gradient descent:\n");
    System.Console.WriteLine(theta);
    System.Console.WriteLine("Expected theta values (approx)\n");
    System.Console.WriteLine(" -3.6303\n 1.1664\n\n");

    Matrix<double> h = X * theta;          // hypothesis
    Matrix<double> x = X.RemoveColumn(0);  // remove x0
    PlotLinearFit(x.Column(0).ToArray(), h.Column(0).ToArray());
    GnuPlot.HoldOff();

    var predict1 = M.DenseOfArray(new double[,] { { 1, 3.5 } }) * theta;
    System.Console.WriteLine("For population = 35,000, we predict a profit of {0:F4}", predict1[0, 0] * 10000);
    var predict2 = M.DenseOfArray(new double[,] { { 1, 7 } }) * theta;
    System.Console.WriteLine("For population = 70,000, we predict a profit of {0:F4}", predict2[0, 0] * 10000);
    Pause();

    // ============= Part 4: Visualizing J(theta_0, theta_1) =============
    System.Console.WriteLine("Visualizing J(theta_0, theta_1) ...\n");
    PlotJ(X, y, theta);
    Pause();
}
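The ComputeCost helper called above is not shown in this example. A minimal sketch consistent with the expected values printed by the program (the mean-squared-error cost for linear regression) could look like the following; the project's own implementation may differ.

// Sketch only: J(theta) = 1/(2m) * sum((X*theta - y)^2)
static double ComputeCost(Matrix<double> X, Matrix<double> y, Matrix<double> theta)
{
    int m = y.RowCount;
    Matrix<double> error = X * theta - y;                          // residuals (m x 1)
    return error.TransposeThisAndMultiply(error)[0, 0] / (2.0 * m);
}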
static void Main1(string[] args)
{
    // initialize the translation matrix
    Matrix<double> transMatrix = DelimitedReader.Read<double>(@"C:\Mannheim Uni\New folder\m.csv", false, ",", false);

    // initialize the vocabulary index dictionary
    Wordindenx();

    // initialize the word embedding vectors
    var dv = dicVic();
    Console.WriteLine("dictionary vec loaded");

    // declare a dictionary for the document embeddings
    Dictionary<int, Vector<double>> docEm = new Dictionary<int, Vector<double>>();

    // declare an indexer
    int i = 0;

    // declare a variable for the weights' sum
    double WeightSum = 0;

    // declare a vector for the sum of the word embedding vectors
    Vector<double> vecSum = Vector<double>.Build.Dense(300);

    // loop to read the dataset in English
    foreach (var line in File.ReadLines(@"C:\Mannheim Uni\New folder\train_en.txt"))
    {
        if (Convert.ToInt32(line.Split()[0]) == i)
        {
            if (dv.ContainsKey(wIndexer[line.Split()[1]]))
            {
                // obtain the word embedding
                Vector<double> V = dv[wIndexer[line.Split()[1]]];

                // multiply the vector with the translation matrix
                V = V * transMatrix;

                // multiply the vector with the word weight
                V = V * Convert.ToDouble(line.Split()[2]);

                // aggregate the vectors
                vecSum += V;
                WeightSum += Convert.ToDouble(line.Split()[2]);
            }
        }
        else
        {
            // divide the aggregated vectors by the weights' sum
            vecSum = vecSum / WeightSum;

            // add the final document embedding vector to the dictionary
            docEm.Add(i, vecSum);
            WeightSum = 0;
            vecSum = 0 * vecSum;
            Console.WriteLine(i);
            i++;
        }
    }
    Console.WriteLine("done");

    // write the final document embeddings to a CSV file
    using (StreamWriter file = new StreamWriter(@"C:\Mannheim Uni\New folder\test - Copy.csv"))
    {
        foreach (var item in docEm)
        {
            file.Write(item.Key + "," + string.Join(" ", item.Value.ToArray()));
            file.Write(Environment.NewLine);
        }
    }

    Console.WriteLine("Press any key to exit");
    Console.ReadKey();
}
private void button1_Click(object sender, EventArgs e)
{
    // Read first file
    string fileOne = "matrixone.csv";
    Matrix<double> fileMatrixOne = DelimitedReader.Read<double>(fileOne, false, ",", false);

    // Write the information about fileMatrixOne in textBox1
    textBox1.AppendText("\n");
    textBox1.AppendText(fileMatrixOne.ToString("F2"));
    textBox1.AppendText("matrix1 rows: " + fileMatrixOne.RowCount.ToString("F2"));
    textBox1.AppendText("\n");
    textBox1.AppendText("matrix1 columns: " + fileMatrixOne.ColumnCount.ToString("F2"));
    textBox1.AppendText("\n");

    // First CSV file into a multidimensional array
    // string[][] numOne = File.ReadLines(@"C:\Users\PC\Desktop\matrix1.csv").Select(s => s.Split(",".ToCharArray())).ToArray().ToArray();
    // textBox1.AppendText("stringOne : " + numOne[1][0]);

    // Read second file
    string fileTwo = "matrixtwo.csv";
    Matrix<double> fileMatrixTwo = DelimitedReader.Read<double>(fileTwo, false, ",", false);

    // Write the information about fileMatrixTwo in textBox1
    textBox1.AppendText("\n");
    textBox1.AppendText(fileMatrixTwo.ToString("F2"));
    textBox1.AppendText("matrix2 rows: " + fileMatrixTwo.RowCount.ToString("F2"));
    textBox1.AppendText("\n");
    textBox1.AppendText("matrix2 columns: " + fileMatrixTwo.ColumnCount.ToString("F2"));
    textBox1.AppendText("\n");

    // Second CSV file into a multidimensional array
    // string[][] numTwo = File.ReadLines(@"C:\Users\PC\Desktop\matrix2.csv").Select(s => s.Split(",".ToCharArray())).ToArray().ToArray();
    // textBox1.AppendText("stringOne : " + numTwo[1][0]);

    if (fileMatrixTwo.RowCount == fileMatrixOne.RowCount && fileMatrixOne.ColumnCount == fileMatrixTwo.ColumnCount)
    {
        // TASK ONE
        textBox1.AppendText("\n");
        textBox1.AppendText("\nTASK ONE");
        Matrix<double> resultSum = DelimitedReader.Read<double>(fileTwo, false, ",", false);
        resultSum.Add(fileMatrixOne, resultSum);
        textBox1.AppendText("\n");
        textBox1.AppendText("Summation:");
        textBox1.AppendText(resultSum.ToString("F2"));

        double[,] resultArray = new double[resultSum.RowCount, resultSum.ColumnCount];
        // textBox1.AppendText(resultArray[1,1].ToString("F2"));

        bool antisymmetrical = true;

        // antisymmetrical matrix
        if (fileMatrixOne.RowCount == fileMatrixOne.ColumnCount && fileMatrixTwo.RowCount == fileMatrixTwo.ColumnCount)
        {
            for (int i = 0; i < resultSum.ColumnCount; i++)
            {
                for (int j = 0; j < resultSum.RowCount; j++)
                {
                    if (j != i)
                    {
                        // textBox1.AppendText("Symmetrical\n");
                        antisymmetrical = resultSum[j, i] != resultSum[i, j];
                        if (!antisymmetrical)
                        {
                            textBox1.AppendText("\n");
                            textBox1.AppendText("\nAntiSymmetrical:\n");
                            textBox1.AppendText("false\n");
                            i = resultSum.ColumnCount;
                            j = resultSum.RowCount;
                        }
                    }
                }
            }
            if (antisymmetrical)
            {
                textBox1.AppendText("\n");
                textBox1.AppendText("\nAntiSymmetrical: \n");
                textBox1.AppendText("true\n");
            }
        }
        else
        {
            textBox1.AppendText("\n");
            textBox1.AppendText("\nAntiSymmetrical: \n");
            textBox1.AppendText("false\n");
        }

        // TASK TWO
        textBox1.AppendText("\n");
        textBox1.AppendText("\nTASK TWO");
        Matrix<double> resultMul = DelimitedReader.Read<double>(fileTwo, false, ",", false);
        resultMul.Multiply(fileMatrixOne, resultMul);
        textBox1.AppendText("\n");
        textBox1.AppendText("Multiplication");
        textBox1.AppendText(resultMul.ToString("F2"));

        // double[,] resultArray = new double[resultMul.RowCount, resultMul.ColumnCount];
        // textBox1.AppendText(resultArray[1,1].ToString("F2"));

        bool symmetrical = true;

        // symmetrical matrix
        if (fileMatrixOne.RowCount == fileMatrixOne.ColumnCount && fileMatrixTwo.RowCount == fileMatrixTwo.ColumnCount)
        {
            for (int i = 0; i < resultMul.ColumnCount; i++)
            {
                for (int j = 0; j < resultMul.RowCount; j++)
                {
                    if (j != i)
                    {
                        // textBox1.AppendText("Symmetrical\n");
                        symmetrical = resultMul[j, i] == resultMul[i, j];
                        if (!symmetrical)
                        {
                            textBox1.AppendText("\n");
                            textBox1.AppendText("\nSymmetrical:\n");
                            textBox1.AppendText("false\n");
                            i = resultMul.ColumnCount;
                            j = resultMul.RowCount;
                        }
                    }
                }
            }
            if (symmetrical)
            {
                textBox1.AppendText("\n");
                textBox1.AppendText("\nSymmetrical: \n");
                textBox1.AppendText("true\n");
            }
        }
        else
        {
            textBox1.AppendText("\n");
            textBox1.AppendText("\nSymmetrical: \n");
            textBox1.AppendText("false\n");
        }
    }
    else
    {
        textBox1.AppendText("\n");
        textBox1.AppendText("Undefined Matrices!");
    }

    // Matrix<Complex32> x = Matrix<Complex32>.Build.Dense(4, 4);
    // textBox1.AppendText(x.ToString("F2"));
    // Matrix<Complex32> y = Matrix<Complex32>.Build.Dense(4, 4, (i, j) => new Complex32(1.0f, 1.0f));
    // textBox1.AppendText(y.ToString("F2"));
    // x = y;
    // y.CopyTo(x);
    // x.Add(new Complex32(10.0f, 0), x);
    // y.Multiply(new Complex32(2.0f, 0), n);
    // textBox1.AppendText(n.ToString("F2"));
    // textBox1.AppendText(x.ToString("F2"));
    // textBox1.AppendText(y.ToString("F2"));
    // textBox1.AppendText(y.Column(1).ToString("F2"));
    // Vector<Complex32> z = y.ColumnSums();
    // textBox1.AppendText(z.ToString("F2"));
}
static void Main(string[] args)
{
    try
    {
        var gFile = @"C:\Users\alexismayfire\Desktop\CSM30\Trabalho 2\Imagem-A\g-1.txt";
        var hFile = @"C:\Users\alexismayfire\Desktop\CSM30\Trabalho 2\Imagem-A\H-1.txt";
        var M = Matrix<double>.Build;
        var V = Vector<double>.Build;
        int rows = 50816;
        int columns = 3600;

        StreamReader sr = new StreamReader(gFile);
        try
        {
            Console.WriteLine(DateTime.Now);
            double[] temp = new double[rows];
            using (sr)
            {
                string line;
                int i = 0;
                while ((line = sr.ReadLine()) != null)
                {
                    temp[i] = Double.Parse(line);
                    i++;
                }
            }
            var v = V.Dense(temp);
            Console.WriteLine("File g read");
            Console.WriteLine(DateTime.Now);
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
            Console.WriteLine(e.StackTrace);
        }
        sr.Dispose();

        sr = new StreamReader(hFile);
        try
        {
            Console.WriteLine(DateTime.Now);
            Matrix<double> matrix = M.Sparse(rows, columns);
            matrix = DelimitedReader.Read<double>(sr, false, ",", false, null);
            Console.WriteLine("File H read");
            Console.WriteLine(DateTime.Now);
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
            Console.WriteLine(e.StackTrace);
        }
        sr.Dispose();

        Console.Read();
    }
    catch (Exception e)
    {
        Console.WriteLine("The file could not be read:");
        Console.WriteLine(e.Message);
    }
}
static void Main(string[] args)
{
    if (!System.Console.IsOutputRedirected)
    {
        System.Console.Clear();
    }

    CultureInfo.CurrentCulture = CultureInfo.CreateSpecificCulture("en-US");

    var M = Matrix<double>.Build;
    var V = Vector<double>.Build;

    System.Console.WriteLine("Linear Regression ex.1 multiple variables");
    System.Console.WriteLine("=========================================\n");

    // load data
    System.Console.WriteLine("Loading data ...\n");
    Matrix<double> data = DelimitedReader.Read<double>("data\\ex1data2.txt", false, ",", false);
    Matrix<double> X = data.SubMatrix(0, data.RowCount, 0, 2);
    Matrix<double> y = data.SubMatrix(0, data.RowCount, 2, 1);
    int m = X.RowCount;

    // Print out some data points
    System.Console.WriteLine("First 10 examples from the dataset: \n");
    var temp = M.DenseOfMatrixArray(new[,]
    {
        { X.SubMatrix(0, 10, 0, 2), y.SubMatrix(0, 10, 0, 1) }
    });
    Console.WriteLine(temp);

    // Scale features and set them to zero mean
    System.Console.WriteLine("Normalizing Features ...\n");
    (Matrix<double> X_norm, Matrix<double> mu, Matrix<double> sigma) norm_res;
    norm_res = FeatureNormalize(X);
    System.Console.WriteLine($"mu: {norm_res.mu}");
    System.Console.WriteLine($"sigma: {norm_res.sigma}");
    System.Console.WriteLine($"X_norm: {norm_res.X_norm}");

    // Add intercept term to X
    X = norm_res.X_norm;
    X = X.InsertColumn(0, V.Dense(X.RowCount, 1));

    // Running gradient descent ...
    System.Console.WriteLine("Running gradient descent ...\n");

    // Choose some alpha value
    double alpha = 0.01;
    int num_iters = 50;

    GnuPlot.HoldOn();

    Matrix<double> theta = M.Dense(3, 1);
    (Matrix<double> theta, Matrix<double> J_history) res_grad1 = GradientDescentMulti(X, y, theta, alpha, num_iters);
    PlotJ(res_grad1.J_history, "{/Symbol a}=" + alpha, "blue");

    theta = M.Dense(3, 1);
    alpha = 0.03;
    (Matrix<double> theta, Matrix<double> J_history) res_grad2 = GradientDescentMulti(X, y, theta, alpha, num_iters);
    PlotJ(res_grad2.J_history, "{/Symbol a}=" + alpha, "red");

    theta = M.Dense(3, 1);
    alpha = 0.1;
    (Matrix<double> theta, Matrix<double> J_history) res_grad3 = GradientDescentMulti(X, y, theta, alpha, num_iters);
    PlotJ(res_grad3.J_history, "{/Symbol a}=" + alpha, "black");

    theta = M.Dense(3, 1);
    alpha = 0.3;
    (Matrix<double> theta, Matrix<double> J_history) res_grad4 = GradientDescentMulti(X, y, theta, alpha, num_iters);
    PlotJ(res_grad4.J_history, "{/Symbol a}=" + alpha, "green");

    GnuPlot.HoldOff();

    // Display gradient descent's result
    System.Console.WriteLine("Theta computed from gradient descent: \n");
    System.Console.WriteLine(res_grad4.theta);
    System.Console.WriteLine("\n");

    // Estimate the price of a 1650 sq-ft, 3 br house.
    // Recall that the first column of X is all-ones; thus, it does not need to be normalized.
    double x1 = 1650;
    double x2 = 3;
    x1 = (x1 - norm_res.mu[0, 0]) / norm_res.sigma[0, 0];
    x2 = (x2 - norm_res.mu[0, 1]) / norm_res.sigma[0, 1];

    Matrix<double> K = Matrix<double>.Build.DenseOfRowArrays(new double[] { 1, x1, x2 });
    System.Console.WriteLine("K is:");
    System.Console.WriteLine(K);

    double price = (K * res_grad4.theta)[0, 0];
    System.Console.WriteLine("Predicted price of a 1650 sq-ft, 3 br house (using gradient descent):\n ${0:f}\n", price);
    Pause();

    // =================================================================
    // NORMAL EQUATION
    // =================================================================
    System.Console.WriteLine("Solving with normal equations...\n");

    // load data
    System.Console.WriteLine("Loading data ...\n");
    data = DelimitedReader.Read<double>("data\\ex1data2.txt", false, ",", false);
    X = data.SubMatrix(0, data.RowCount, 0, 2);
    y = data.SubMatrix(0, data.RowCount, 2, 1);
    m = X.RowCount;

    // Add intercept term to X
    X = X.InsertColumn(0, V.Dense(X.RowCount, 1));

    // Calculate the parameters from the normal equation
    theta = normalEqn(X, y);

    System.Console.WriteLine("Theta computed from the normal equations: \n");
    System.Console.WriteLine(theta);
    System.Console.WriteLine("\n");

    K = Matrix<double>.Build.DenseOfRowArrays(new double[] { 1, 1650, 3 });
    price = (K * theta)[0, 0];
    System.Console.WriteLine("Predicted price of a 1650 sq-ft, 3 br house (using normal equation):\n ${0:f}\n", price);
    Pause();
}
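The normalEqn helper called above is not shown in this example. A minimal sketch consistent with the call (closed-form least squares, theta = (XᵀX)⁻¹ Xᵀ y) could look like the following; the project's own implementation may differ.

// Sketch only: normal-equation solution for linear regression.
static Matrix<double> normalEqn(Matrix<double> X, Matrix<double> y)
{
    // the pseudo-inverse keeps the solve stable if XᵀX is close to singular
    return X.TransposeThisAndMultiply(X).PseudoInverse() * X.TransposeThisAndMultiply(y);
}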