public void Score_Discrete_All(double[] test, double[] actual, int truepos, int trueneg,
    double accuracy, double precision, double recall, double auc, double fScore)
{
    var scores = Score.ScorePredictions(test, actual);

    Almost.Equal(truepos, scores.TruePositives, Delta, "True Positives");
    Almost.Equal(trueneg, scores.TrueNegatives, Delta, "True Negatives");
    Almost.Equal(accuracy, scores.Accuracy, Delta, "Accuracy");
    Almost.Equal(precision, scores.Precision, Delta, "Precision");
    Almost.Equal(recall, scores.Recall, Delta, "Recall");
    Almost.Equal(auc, scores.AUC, Delta, "AUC");
    Almost.Equal(fScore, scores.FScore, Delta, "FScore");
}
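// For reference, the discrete metrics asserted above follow the standard
// confusion-matrix definitions (Score.ScorePredictions' internals are
// assumed to match):
//   Accuracy  = (TP + TN) / (TP + TN + FP + FN)
//   Precision = TP / (TP + FP)
//   Recall    = TP / (TP + FN)
//   FScore    = 2 * Precision * Recall / (Precision + Recall)
// AUC is the area under the ROC curve rather than a single-threshold ratio.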
public void Network_2Layer_Forward_Sigmoid_Test()
{
    var net = Get_2Layer_Network();
    for (int row = 0; row < X.Rows; row++)
    {
        net.Forward(X[row, VectorType.Row]);
        for (int output = 0; output < net.Out.Length; output++)
        {
            Almost.Equal(Output_Out_2Layer[row, output], net.Out[output].Output, 0.0025);
        }
    }
}
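// The Output_Out_2Layer fixture holds activations from a plain sigmoid
// forward pass (a sketch; the weights come from Get_2Layer_Network):
// each node computes
//   z      = sum_i(w_i * in_i)
//   Output = 1 / (1 + e^(-z))
// propagated layer by layer from net.In to net.Out.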
public void Network_1Layer_Backward_Sigmoid_Test()
{
    var net = Get_1Layer_Network();
    var hiddenNodes = net.GetNodes(1);
    for (int row = 0; row < X.Rows; row++)
    {
        Console.WriteLine($"Evaluating Pass {row}...");
        net.Forward(X[row, VectorType.Row]);
        net.Back(y[row, VectorType.Row], null, false);

        for (int output = 0; output < net.Out.Count(); output++)
        {
            Almost.Equal(Delta_Out_1Layer[row, output], net.Out.ElementAt(output).Delta, 0.0001);
        }

        for (int hidden = 0; hidden < hiddenNodes.Count(); hidden++)
        {
            if (row == 0)
            {
                Almost.Equal(Delta2_1Layer_Case1[hidden, VectorType.Col].Sum(),
                    hiddenNodes.ElementAt(hidden).Delta, 0.0001,
                    $"Node: {hiddenNodes.ElementAt(hidden).Label}");
            }
            else
            {
                Almost.Equal(Delta2_1Layer_Case2[hidden, VectorType.Col].Sum(),
                    hiddenNodes.ElementAt(hidden).Delta, 0.0001,
                    $"Node: {hiddenNodes.ElementAt(hidden).Label}");
            }
        }

        for (int input = 0; input < net.In.Length; input++)
        {
            if (row == 0)
            {
                Almost.Equal(Delta1_1Layer_Case1[input, VectorType.Col].Sum(),
                    net.In[input].Delta, 0.2, $"Node: {net.In[input].Label}");
            }
            else
            {
                Almost.Equal(Delta1_1Layer_Case2[input, VectorType.Col].Sum(),
                    net.In[input].Delta, 0.2, $"Node: {net.In[input].Label}");
            }
        }
    }
}
public void Test_Cofi_CostFunction()
{
    Matrix rMat = Y.ToBinary(i => i > 0d);

    ICostFunction costFunction = new CofiCostFunction()
    {
        R = rMat,
        X = X,
        Y = Y.Unshape(),
        Lambda = 0,
        Regularizer = null,
        CollaborativeFeatures = X.Cols
    };
    costFunction.Initialize();

    double cost = costFunction.ComputeCost(Vector.Combine(X.Unshape(), Theta.Unshape()));
    Vector grad = costFunction.ComputeGradient(Vector.Combine(X.Unshape(), Theta.Unshape()));

    Almost.Equal(39.796d, System.Math.Round(cost, 3), 0.001);

    this.CheckCofiGradient(0);
}
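// The expected cost follows the usual collaborative-filtering objective
// (a sketch; CofiCostFunction's exact scaling is assumed):
//   J(X, Theta) = 1/2 * sum over (i,j) with R(i,j) = 1 of
//                 (x(i) . theta(j) - y(i,j))^2
//                 + lambda/2 * (||Theta||^2 + ||X||^2)
// With Lambda = 0 the regularization term vanishes, leaving the pure squared
// error over rated entries, which should round to 39.796.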
public void Vector_GetRandom_Element_Test()
{
    Vector v = new[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
    Vector h = new Vector(v.Length);

    // draw 10,000 samples; each of the 10 elements should be
    // picked roughly 1,000 times if GetRandom is uniform
    for (int i = 0; i < 10000; i++)
    {
        h[(int)v.GetRandom()] += 1;
    }

    // normalize so every bucket should be close to 1
    h = h / 1000;
    for (int i = 0; i < h.Length; i++)
    {
        Almost.Equal(1d, h[i], .1);
    }
}
public void Linear_Regression_Test_House_Predictions_Normal()
{
    // predict house prices from square footage and number of bedrooms
    Matrix x = new[,]
    {
        { 2104, 3 }, { 1600, 3 }, { 2400, 3 }, { 1416, 2 }, { 3000, 4 },
        { 1985, 4 }, { 1534, 3 }, { 1427, 3 }, { 1380, 3 }, { 1494, 3 }
    };
    Vector y = new[] { 399900, 329900, 369000, 232000, 539900, 299900, 314900, 198999, 212000, 242500 };

    LinearRegressionGenerator generator = new LinearRegressionGenerator()
    {
        LearningRate = 0.01,
        MaxIterations = 400,
        Lambda = 0,
        NormalizeFeatures = true
    };
    var model = generator.Generate(x.Copy(), y.Copy());
    var priceEqns = model.Predict(new Vector(new double[] { 1650, 3 }));

    // CK 150929: increased due to improvements in optimisation
    double actualEqns = 295107.0d;
    Almost.Equal(actualEqns, System.Math.Round(priceEqns, 0), 5000);
}
public void Linear_Regression_Test_House_Predictions_Regularized()
{
    // predict house prices from square footage and number of bedrooms
    Matrix x = new[,]
    {
        { 2104, 3 }, { 1600, 3 }, { 2400, 3 }, { 1416, 2 }, { 3000, 4 },
        { 1985, 4 }, { 1534, 3 }, { 1427, 3 }, { 1380, 3 }, { 1494, 3 }
    };
    Vector y = new[] { 399900, 329900, 369000, 232000, 539900, 299900, 314900, 198999, 212000, 242500 };

    LinearRegressionGenerator generator = new LinearRegressionGenerator()
    {
        LearningRate = 0.01,
        MaxIterations = 400,
        Lambda = 1,
        NormalizeFeatures = true
    };
    var model = generator.Generate(x.Copy(), y.Copy());
    var priceGrad = model.Predict(new Vector(new double[] { 1650, 3 }));

    double actualGrad = 296500.0d;
    Almost.Equal(actualGrad, System.Math.Round(priceGrad, 0), 5000);
}
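// Both house-price tests fit theta by batch gradient descent over normalized
// features (a sketch; LinearRegressionGenerator's internals are assumed).
// Per iteration, for each weight j >= 1 with m training rows:
//   theta[j] := theta[j] - alpha * ( (1/m) * sum_i((h(x_i) - y_i) * x_ij)
//                                    + (lambda/m) * theta[j] )
// with the bias term left unregularized. Lambda = 0 reduces this to the
// plain unregularized update of the first test; Lambda = 1 penalizes the
// weights, yielding the slightly different expected prediction.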
public void RNN_Unit_Test_1()
{
    var input = new Supervised.NeuralNetwork.Neuron()
    {
        ActivationFunction = new Math.Functions.Ident(),
        Input = 1.0
    };

    // hh = 0.00845734
    var gru = new Supervised.NeuralNetwork.Recurrent.RecurrentNeuron()
    {
        ActivationFunction = new Math.Functions.Tanh(),
        UpdateGate = new Math.Functions.Logistic(),
        ResetGate = new Math.Functions.Logistic(),
        H = 0,
        Rb = 0.0,
        Zb = 0.0,
        Rh = 0.00822019,
        Rx = -0.00808389,
        Zh = 0.00486728,
        Zx = -0.0040537
    };

    // Z should equal approx. 0.49898658
    // R should equal approx. 0.49797904
    // htP should equal approx. 0.00406561
    // H should equal approx. 0.00202869
    Supervised.NeuralNetwork.Edge.Create(input, gru, 0.00845734);

    double output = gru.Evaluate();
    Almost.Equal(0.00422846, output, 0.002, "First pass");

    gru.Output = 1.5;
    double output2 = gru.Evaluate();
    Almost.Equal(0.00739980, output2, 0.002, "Second pass");
}
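// The expected gate and state values in these GRU tests follow the recurrence
// below (a sketch inferred from the asserted numbers; RecurrentNeuron's exact
// internals are assumed). With input x, previous state h, and input weight w:
//   Z = logistic(Zx * x + Zh * h + Zb)   // update gate
//   R = logistic(Rx * x + Rh * h + Rb)   // reset gate
//   c = tanh(w * x + Hh * (R * h))       // candidate state
//   H = Z * c + (1 - Z) * h              // new hidden state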
public void Test_Normal_Estimation(int d, int n)
{
    // generate mu and sigma; assuming a diagonal covariance matrix,
    // sigma is set to the sqrt of the mean for generation purposes
    // (so the true variance equals the mean, which is easy to test)
    Vector means = Vector.Zeros(d);
    Vector sigma = Vector.Zeros(d);
    for (int i = 0; i < d; i++)
    {
        means[i] = Sampling.GetUniform() * 10;
        sigma[i] = System.Math.Sqrt(means[i]);
    }

    Matrix data = Matrix.Zeros(n, d);
    for (int i = 0; i < n; i++)
    {
        for (int j = 0; j < d; j++)
        {
            data[i, j] = Sampling.GetNormal(means[j], sigma[j]);
        }
    }

    NormalDistribution dstrb = new NormalDistribution();
    dstrb.Estimate(data);
    var cov = dstrb.Sigma.Diag();

    // relative-difference helper
    Func<double, double, double> diff = (a, b) => System.Math.Abs(a - b) / a;

    for (int i = 0; i < d; i++)
    {
        // relative error of the estimated mean should be ~0 (10% tolerance)
        Almost.Equal(diff(means[i], dstrb.Mu[i]), 0, 0.1);
        // since variance = mean by construction, the diagonal covariance
        // should also match the true mean (10% tolerance)
        Almost.Equal(diff(means[i], cov[i]), 0, 0.1);
    }
}
public void RNN_Unit_Test_2()
{
    var input = new Supervised.NeuralNetwork.Neuron()
    {
        ActivationFunction = new Math.Functions.Ident()
    };
    input.Input = 10.0;

    var gru = new Supervised.NeuralNetwork.Recurrent.RecurrentNeuron()
    {
        ActivationFunction = new Math.Functions.Tanh(),
        UpdateGate = new Math.Functions.Logistic(),
        ResetGate = new Math.Functions.Logistic(),
        H = 0.0543,
        Rb = 1.5,
        Zb = -1.5,
        Rh = -0.00111453,
        Rx = 0.00112138,
        Zh = 0.00899571,
        Zx = 0.00999628,
        Hh = 0.00423760
    };
    Supervised.NeuralNetwork.Edge.Create(input, gru, 1.0);

    double output = gru.Evaluate();
    Almost.Equal(0.24144243, output, 0.00001, "1: Hidden state");
    Almost.Equal(0.81923206, gru.R, 0.00001, "1: Reset value");
    Almost.Equal(0.19788773, gru.Z, 0.00001, "1: Update value");

    input.Input = 20.0;
    double output2 = gru.Evaluate();
    Almost.Equal(0.40416687, output2, 0.00001, "Second pass");
    Almost.Equal(0.82085611, gru.R, 0.00001, "2: Reset value");
    Almost.Equal(0.21451824, gru.Z, 0.00001, "2: Update value");
}
public void RNN_Unit_Test_3()
{
    var input = new Supervised.NeuralNetwork.Neuron()
    {
        ActivationFunction = new Math.Functions.Ident(),
        Output = 10.0
    };

    var gru = new Supervised.NeuralNetwork.Recurrent.RecurrentNeuron()
    {
        ActivationFunction = new Math.Functions.Tanh(),
        MemoryGate = new Math.Functions.Logistic(),
        ResetGate = new Math.Functions.Logistic(),
        H = 0.0543,
        Rb = 1.5,
        Zb = -1.5,
        Rh = -0.00111453,
        Rx = 0.00112138,
        Zh = 0.00899571,
        Zx = 0.00999628,
    };
    Supervised.NeuralNetwork.Edge.Create(input, gru, 1.0);

    double output = gru.Evaluate();
    //Assert.Equal(0.24144242, output, 0.002, "First pass");
    Almost.Equal(0.18775503, output, 0.002, "First pass");

    input.Output = 20.0;
    double output2 = gru.Evaluate();
    //Assert.Equal(0.40416686, output2, 0.002, "Second pass");
    Almost.Equal(0.30399969, output2, 0.002, "Second pass");
}
public void Check_Neural_Network_Gradients()
{
    Matrix xtest = new double[][]
    {
        new [] { 0.54030, -0.41615 }, // X1
        new [] { 0.54030, -0.41615 }, // X2
        new [] { 0.28366,  0.96017 }  // X3
    }.ToMatrix();

    Matrix ytest = new double[][]
    {
        new [] { 0d, 0d, 0d, 1d }, // X1
        new [] { 0d, 1d, 0d, 0d }, // X2
        new [] { 0d, 0d, 1d, 0d }  // X3
    }.ToMatrix();

    Matrix theta1 = new double[][]
    {
        //       b    1    2
        new [] { 0.1, 0.3, 0.5 }, // h1
        new [] { 0.2, 0.4, 0.6 }, // h2
    }.ToMatrix();

    Matrix theta2 = new double[][]
    {
        //       b    h1   h2
        new [] { 0.7, 1.1, 1.5 }, // o1
        new [] { 0.8, 1.2, 1.6 }, // o2
        new [] { 0.9, 1.3, 1.7 }, // o3
        new [] { 1.0, 1.4, 1.8 }, // o4
    }.ToMatrix();

    Matrix delta1 = new double[][]
    {
        //       b        1        2
        new [] { 0.79393, 0.42896, -0.33039 }, // h1
        new [] { 1.05281, 0.56883, -0.43812 }, // h2
    }.ToMatrix();

    Matrix delta2 = new double[][]
    {
        //       b         h1        h2
        new [] { 0.888659, 0.456328, 0.481220 },    // o1
        new [] { 0.907427, 0.465965, 0.491383 },    // o2
        new [] { 0.923305, 0.474118, 0.499981 },    // o3
        new [] { -0.063351, -0.032531, -0.034305 }, // o4
    }.ToMatrix();

    Vector delta3 = new[] { 0.888659, 0.907427, 0.923305, -0.063351 };

    Network net = Network.New().Create(2, 4, new numl.Math.Functions.Logistic(),
        fnWeightInitializer: (l, i, j) =>
        {
            if (l == 1) return theta2[j, i];
            else return theta1[j - 1, i];
        },
        hiddenLayers: 2);

    net.Forward(xtest[0, VectorType.Row]);
    net.Back(ytest[0, VectorType.Row], null, false);

    var hiddenNodes = net.GetNodes(1);
    for (int output = 0; output < net.Out.Count(); output++)
    {
        Almost.Equal(delta3[output], net.Out.ElementAt(output).Delta, 0.0001);
    }
    for (int hidden = 0; hidden < hiddenNodes.Count(); hidden++)
    {
        Almost.Equal(delta2[hidden, VectorType.Col].Sum(),
            hiddenNodes.ElementAt(hidden).Delta, 0.0001,
            $"Node: {hiddenNodes.ElementAt(hidden).Label}");
    }
}
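// The hard-coded delta matrices above encode standard backpropagation with
// logistic activations a = g(z) (a sketch; numl's internal conventions for
// bias handling are assumed to match):
//   output deltas:  delta3[i]    = a_out[i] - y[i]
//   hidden deltas:  d_h[j]       = (theta2^T * delta3)[j] * a_h[j] * (1 - a_h[j])
//   per-edge grads: delta2[i, j] = delta3[i] * a_prev[j]
//                   (delta1 likewise from d_h and the inputs)
// The assertions sum each matrix column-wise to obtain a per-node Delta.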