public static void Main()
{
    modshogun.init_shogun_with_defaults();
    int gamma = 3;

    double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
    double[,] testdata_real = Load.load_numbers("../data/fm_test_real.dat");
    double[] trainlab = Load.load_labels("../data/label_train_twoclass.dat");

    RealFeatures feats_train = new RealFeatures();
    feats_train.set_feature_matrix(traindata_real);
    RealFeatures feats_test = new RealFeatures();
    feats_test.set_feature_matrix(testdata_real);

    BinaryLabels labels = new BinaryLabels(trainlab);

    LDA lda = new LDA(gamma, feats_train, labels);
    lda.train();

    Console.WriteLine(lda.get_bias());
    //Console.WriteLine(lda.get_w().toString());
    foreach (double item in lda.get_w())
    {
        Console.Write(item + " "); // separate the weights; a bare Write would run them together
    }

    lda.set_features(feats_test);
    double[] out_labels = LabelsFactory.to_binary(lda.apply()).get_labels();
    foreach (double item in out_labels)
    {
        Console.Write(item + " ");
    }
}
public void TestLDA()
{
    SignalBus signals = new SignalBus();
    Operation op = new LDA(null, signals, null);

    signals.Reset();
    op.Step0();
    Assert.IsTrue(signals.MI);
    Assert.IsTrue(signals.CO);

    signals.Reset();
    op.Step1();
    Assert.IsTrue(signals.RO);
    Assert.IsTrue(signals.II);
    Assert.IsTrue(signals.CE);

    signals.Reset();
    op.Step2();
    Assert.IsTrue(signals.IO);
    Assert.IsTrue(signals.MI);

    signals.Reset();
    op.Step3();
    Assert.IsTrue(signals.RO);
    Assert.IsTrue(signals.AI);
}
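// Hedged sketch: an LDA Operation whose steps raise exactly the control signals
// the test above asserts -- instruction fetch (MI|CO, then RO|II|CE), operand
// address into MAR (IO|MI), then memory-to-accumulator transfer (RO|AI). The
// Operation base class, its virtual Step methods, and the constructor argument
// types are assumptions inferred from the test, not this project's actual code.
public class LDA : Operation
{
    private readonly SignalBus signals;

    public LDA(object decoder, SignalBus signals, object alu)
    {
        this.signals = signals;
    }

    public override void Step0() { signals.MI = true; signals.CO = true; }                     // address bus <- PC
    public override void Step1() { signals.RO = true; signals.II = true; signals.CE = true; }  // IR <- RAM, PC++
    public override void Step2() { signals.IO = true; signals.MI = true; }                     // MAR <- operand
    public override void Step3() { signals.RO = true; signals.AI = true; }                     // A <- RAM
}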
public void CreateAndDispose()
{
    using (var lda = new LDA())
    {
        GC.KeepAlive(lda);
    }
}
static void Main(string[] argv)
{
    modshogun.init_shogun_with_defaults();
    int gamma = 3;

    DoubleMatrix traindata_real = Load.load_numbers("../data/fm_train_real.dat");
    DoubleMatrix testdata_real = Load.load_numbers("../data/fm_test_real.dat");
    DoubleMatrix trainlab = Load.load_labels("../data/label_train_twoclass.dat");

    RealFeatures feats_train = new RealFeatures();
    feats_train.set_feature_matrix(traindata_real);
    RealFeatures feats_test = new RealFeatures();
    feats_test.set_feature_matrix(testdata_real);

    Labels labels = new Labels(trainlab);

    LDA lda = new LDA(gamma, feats_train, labels);
    lda.train();

    Console.WriteLine(lda.get_bias());
    Console.WriteLine(lda.get_w().ToString());

    lda.set_features(feats_test);
    DoubleMatrix out_labels = lda.apply().get_labels();
    Console.WriteLine(out_labels.ToString());

    modshogun.exit_shogun();
}
public void Test_LDA_Performance2000_var(int rep)
{
    int errorCode = 0;
    try
    {
        LDA lda = new LDA();
        ILArray<double> X = ILMath.horzcat(ILMath.randn(2, 2000) * 2.0, ILMath.randn(2, 2000) * -2.0);
        ILLogicalArray labels = ILMath.tological(ILMath.horzcat(ILMath.ones(1, 2000), ILMath.zeros(1, 2000)));
        labels = labels.Concat(ILMath.tological(ILMath.zeros(1, 2000).Concat(ILMath.ones(1, 2000), 1)), 0);

        ILPerformer timer = new ILPerformer();
        LDA.Hyperplane C;
        int oldRefMin = ILNumerics.Settings.ILSettings.MinimumRefDimensions;

        ILNumerics.Settings.ILSettings.MinimumRefDimensions = 2;
        timer.Tic();
        for (int i = 0; i < rep; i++)
        {
            C = lda.TrainLDA(X, labels, 0.4);
        }
        timer.Toc();
        Info("Test_LDA_Performance: with reference - data: 2x2000 run " + rep.ToString() + " times in: " + timer.Duration + "ms");

        ILNumerics.Settings.ILSettings.MinimumRefDimensions = 3;
        timer.Tic();
        for (int i = 0; i < rep; i++)
        {
            C = lda.TrainLDA(X, labels, 0.4);
        }
        timer.Toc();
        ILNumerics.Settings.ILSettings.MinimumRefDimensions = oldRefMin;
        Info("Test_LDA_Performance: without reference - data: 2x2000 run " + rep.ToString() + " times in: " + timer.Duration + "ms");

        Success();
    }
    catch (Exception e)
    {
        Error(errorCode, e.Message);
    }
}
static async Task Main(string[] args)
{
    Console.OutputEncoding = Encoding.UTF8;
    ApplicationLogging.SetLoggerFactory(LoggerFactory.Create(lb => lb.AddConsole()));

    //Need to register the languages we want to use first
    Catalyst.Models.English.Register();

    //Configures the model storage to use the local folder ./catalyst-models/
    Storage.Current = new DiskStorage("catalyst-models");

    //Download the Reuters corpus if necessary
    var (train, test) = await Corpus.Reuters.GetAsync();

    //Parse the documents using the English pipeline, as the text data is untokenized so far
    var nlp = Pipeline.For(Language.English);
    var trainDocs = nlp.Process(train).ToArray();
    var testDocs = nlp.Process(test).ToArray();

    //Train an LDA topic model on the training dataset
    using (var lda = new LDA(Language.English, 0, "reuters-lda"))
    {
        lda.Data.NumberOfTopics = 20; //Arbitrary number of topics
        lda.Train(trainDocs, Environment.ProcessorCount);
        await lda.StoreAsync();
    }

    using (var lda = await LDA.FromStoreAsync(Language.English, 0, "reuters-lda"))
    {
        foreach (var doc in testDocs)
        {
            if (lda.TryPredict(doc, out var topics))
            {
                var docTopics = string.Join("\n", topics.Select(t => lda.TryDescribeTopic(t.TopicID, out var td) ? $"[{t.Score:n3}] => {td.ToString()}" : ""));

                Console.WriteLine("------------------------------------------");
                Console.WriteLine(doc.Value);
                Console.WriteLine("------------------------------------------");
                Console.WriteLine(docTopics);
                Console.WriteLine("------------------------------------------\n\n");
            }
        }
    }
}
public static void execute(CPU cpu, MEM memory)
{
    switch (memory.getMem(cpu.PC))
    {
        case 0xA9:      // LDA immediate
            LDA.LDA_IM(cpu, memory);
            return;
        case 0xA5:      // LDA zero page
        case 0xB5:      // LDA zero page,X
        case 0xAD:      // LDA absolute
        case 0xBD:      // LDA absolute,X
        case 0xB9:      // LDA absolute,Y
        case 0xA1:      // LDA (indirect,X)
        case 0xB1:      // LDA (indirect),Y
            // remaining addressing modes are not implemented yet
            return;
    }
}
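// Hedged sketch: one way the missing zero-page handler (opcode 0xA5) could look,
// mirroring the LDA_IM call above. The CPU fields (PC, A, Z, N) and the
// single-argument getMem overload are assumptions, not this emulator's confirmed API.
public static void LDA_ZP(CPU cpu, MEM memory)
{
    cpu.PC++;                           // step past the opcode
    byte addr = memory.getMem(cpu.PC);  // one-byte zero-page operand
    cpu.A = memory.getMem(addr);        // load the accumulator from $00xx
    cpu.PC++;                           // step past the operand
    cpu.Z = cpu.A == 0;                 // LDA sets Z on a zero result
    cpu.N = (cpu.A & 0x80) != 0;        // ...and N from bit 7
}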
public static void Main()
{
    modshogun.init_shogun_with_defaults();
    int gamma = 3;

    double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
    double[,] testdata_real = Load.load_numbers("../data/fm_test_real.dat");
    double[] trainlab = Load.load_labels("../data/label_train_twoclass.dat");

    RealFeatures feats_train = new RealFeatures();
    feats_train.set_feature_matrix(traindata_real);
    RealFeatures feats_test = new RealFeatures();
    feats_test.set_feature_matrix(testdata_real);

    BinaryLabels labels = new BinaryLabels(trainlab);

    LDA lda = new LDA(gamma, feats_train, labels);
    lda.train();

    Console.WriteLine(lda.get_bias());
    //Console.WriteLine(lda.get_w().toString());
    foreach (double item in lda.get_w())
    {
        Console.Write(item + " "); // separate the weights; a bare Write would run them together
    }

    lda.set_features(feats_test);
    double[] out_labels = BinaryLabels.obtain_from_generic(lda.apply()).get_labels();
    foreach (double item in out_labels)
    {
        Console.Write(item + " ");
    }

    modshogun.exit_shogun();
}
//public void Test_SimpleAsyncAlgorithmSample() {
//    int errorCode = 0;
//    try {
//        SimpleAsyncSample sampleAlg = new SimpleAsyncSample(null, 2.0, 50, ILMath.randn(3, 2));
//        sampleAlg.RunAsync();
//        while (sampleAlg.State == ILAlgorithmRunningState.Running) {
//            Info(sampleAlg.Progress * 100 + "\r");
//            System.Threading.Thread.Sleep(300);
//        }
//        Info(sampleAlg.Result.result.ToString());
//        //sampleAlg.Result;
//        Success();
//    } catch (Exception e) {
//        Error(errorCode, e.Message);
//    }
//}

public void Test_LDA()
{
    int errorCode = 0;
    try
    {
        LDA lda = new LDA();
        lda.StateChanged += new ILAlgorithmStateChangedEventHandler(lda_StateChanged);
        lda.ProgressChanged += new ILAlgorithmStateChangedEventHandler(lda_ProgressChanged);

        ILArray<double> X = new ILArray<double>(new double[] { -2, -2, -3, -3, 2, 2, 3, 3 }, 2, 4);
        ILLogicalArray labels = new ILLogicalArray(new byte[8] { 0, 1, 0, 1, 1, 0, 1, 0 }, 2, 4);

        LDA.Hyperplane C = lda.TrainLDA(X, labels, 0.4);
        if (Object.ReferenceEquals(C, null))
        {
            throw new Exception("LDA: result is null!");
        }
        if (!(C.w is ILArray<double>) || C.w.Dimensions[0] != 2)
        {
            throw new Exception("LDA: Results C.w should be ILArray<double> 2x1");
        }
        if (!(C.b is ILArray<double>) || !C.b.IsScalar)
        {
            throw new Exception("LDA: Results C.b should be a scalar ILArray<double>");
        }
        if (ILMath.abs(C.w[0] - -9.3750) > 1e-8)
        {
            throw new Exception("LDA: invalid result: C.w(1) should be: -9.3750");
        }
        if (ILMath.abs(C.w[1] - -9.3750) > 1e-8)
        {
            throw new Exception("LDA: invalid result: C.w(2) should be: -9.3750");
        }
        if (ILMath.abs(C.b.GetValue(0)) > 1e-8)
        {
            throw new Exception("LDA: invalid result: C.b should be: 0.0!");
        }
        Success();
    }
    catch (Exception e)
    {
        Error(errorCode, e.Message);
    }
}
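// Hedged sketch: the two event handlers subscribed above are not part of this
// snippet. Handlers shaped like this would satisfy the subscription; only the
// ILAlgorithmStateChangedEventHandler delegate name comes from the code above,
// while the event-args type name and its members are assumptions.
void lda_StateChanged(object sender, ILAlgorithmEventArgs e)
{
    Info("LDA state changed: " + e.ToString());
}

void lda_ProgressChanged(object sender, ILAlgorithmEventArgs e)
{
    Info("LDA progress: " + e.ToString());
}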
// ReSharper disable once InconsistentNaming
public void LDASample()
{
    double[,] d =
    {
        { 2.95, 6.63 },
        { 2.53, 7.79 },
        { 3.57, 5.65 },
        { 3.16, 5.47 },
        { 2.58, 4.46 },
        { 2.16, 6.22 },
        { 3.27, 3.52 }
    };
    int[] c = { 0, 0, 0, 0, 1, 1, 1 };

    using (var data = new Mat(d.GetLength(0), d.GetLength(1), MatType.CV_64FC1, d))
    using (var classes = new Mat(c.Length, 1, MatType.CV_32SC1, c))
    using (var lda = new LDA(data, classes))
    {
        using (var eigenvectors = lda.Eigenvectors())
        {
            Assert.Equal(2, eigenvectors.Rows);
            Assert.Equal(1, eigenvectors.Cols);
            Assert.Equal(-1.5836, eigenvectors.Get<double>(0), 4);
            Assert.Equal(-0.659729, eigenvectors.Get<double>(1), 4);
        }

        using (var eigenvalues = lda.Eigenvalues())
        {
            Assert.Equal(1, eigenvalues.Rows);
            Assert.Equal(1, eigenvalues.Cols);
            Assert.Equal(3.1447, eigenvalues.Get<double>(0), 4);
        }

        using (var project = lda.Project(data))
        {
            Assert.Equal(d.GetLength(0), project.Rows);
            Assert.Equal(1, project.Cols);
            Assert.Equal(-9.04562, project.Get<double>(0), 5);
            Assert.Equal(-9.14579, project.Get<double>(1), 5);
            Assert.Equal(-9.38091, project.Get<double>(2), 5);
            Assert.Equal(-8.61289, project.Get<double>(3), 5);
            Assert.Equal(-7.02807, project.Get<double>(4), 5);
            Assert.Equal(-7.52409, project.Get<double>(5), 5);
            Assert.Equal(-7.50061, project.Get<double>(6), 5);
        }
    }
}
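// Hedged sketch: a fragment meant to sit inside the using (var lda = ...) block
// above. It projects an unseen sample with the trained LDA and classifies it by
// which projected cluster it lands near. The query point and the -8.0 threshold
// (roughly the midpoint between the projected classes above, where class 0 lands
// near -9 and class 1 near -7.5) are illustrative assumptions.
double[,] q = { { 2.8, 6.0 } };
using (var query = new Mat(1, 2, MatType.CV_64FC1, q))
using (var projected = lda.Project(query))
{
    double p = projected.Get<double>(0);
    int predicted = p < -8.0 ? 0 : 1;
    Console.WriteLine($"projected = {p:F5}, predicted class = {predicted}");
}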
public void Test_LDA_Performance200k_10()
{
    int errorCode = 0;
    try
    {
        LDA lda = new LDA();
        // 100k samples per class, so X (2x200000) matches the 1x200000 label rows below
        ILArray<double> X = ILMath.horzcat(ILMath.randn(2, 100000) * 2.0, ILMath.randn(2, 100000) * -2.0);
        ILLogicalArray labels = ILMath.tological(ILMath.horzcat(ILMath.ones(1, 100000), ILMath.zeros(1, 100000)));
        labels = labels.Concat(ILMath.tological(ILMath.zeros(1, 100000).Concat(ILMath.ones(1, 100000), 1)), 0);

        ILPerformer timer = new ILPerformer();
        timer.Tic();
        LDA.Hyperplane C;
        for (int i = 0; i < 10; i++)
        {
            C = lda.TrainLDA(X, labels, 0.4);
        }
        timer.Toc();
        Info("Test_LDA_Performance2: data: 2x200000 run 10 times in: " + timer.Duration + "ms");
        Success();
    }
    catch (Exception e)
    {
        Error(errorCode, e.Message);
    }
}
private Guid trainLDA(ImageDatabase imdb, FrcContext db)
{
    if (!imdb.isSameImageSize || !imdb.isSameTotalImageForUser)
    {
        throw new NotImplementedException();
    }

    // 1) Compute the mean image over the whole training database:
    var averageMatrix = Matrix<double>.Build.Dense(imdb.ImageHeight, imdb.ImageWidth);
    foreach (var userMatrixList in trainImageLists)
    {
        foreach (var userImage in userMatrixList)
        {
            averageMatrix = averageMatrix + userImage;
        }
    }
    var Xaverage = averageMatrix / (totalUserForTrain * totalTrainImageForUser);

    // 2) Compute the mean image for each class:
    List<Matrix<double>> XclassAverageList = new List<Matrix<double>>();
    foreach (var userMatrixList in trainImageLists)
    {
        var averageClassMatrix = Matrix<double>.Build.Dense(imdb.ImageHeight, imdb.ImageWidth);
        foreach (var userImage in userMatrixList)
        {
            averageClassMatrix = averageClassMatrix + userImage;
        }
        averageClassMatrix = averageClassMatrix / totalTrainImageForUser;
        XclassAverageList.Add(averageClassMatrix);
    }

    // 3) Compute the within-class (w) covariance matrix with respect to rows (R):
    var SwR = Matrix<double>.Build.Dense(imdb.ImageHeight, imdb.ImageHeight);
    for (int i = 0; i < trainImageLists.Count; i++)
    {
        foreach (var userImage in trainImageLists[i])
        {
            SwR = SwR + (userImage - XclassAverageList[i]) * (userImage - XclassAverageList[i]).Transpose();
        }
    }

    // 4) Compute the between-class (b) covariance matrix with respect to rows (R):
    var SbR = Matrix<double>.Build.Dense(imdb.ImageHeight, imdb.ImageHeight);
    foreach (var XclassAverage in XclassAverageList)
    {
        SbR = SbR + (XclassAverage - Xaverage) * (XclassAverage - Xaverage).Transpose();
    }

    // 5) Compute the within-class (w) covariance matrix with respect to columns (C):
    var SwC = Matrix<double>.Build.Dense(imdb.ImageWidth, imdb.ImageWidth);
    for (int i = 0; i < trainImageLists.Count; i++)
    {
        foreach (var userImage in trainImageLists[i])
        {
            SwC = SwC + (userImage - XclassAverageList[i]).Transpose() * (userImage - XclassAverageList[i]);
        }
    }

    // 6) Compute the between-class (b) covariance matrix with respect to columns (C):
    var SbC = Matrix<double>.Build.Dense(imdb.ImageWidth, imdb.ImageWidth);
    foreach (var XclassAverage in XclassAverageList)
    {
        SbC = SbC + (XclassAverage - Xaverage).Transpose() * (XclassAverage - Xaverage);
    }

    // 7) Form the overall scatter matrix with respect to rows:
    var StotalR = SwR.Inverse() * SbR;
    // Regularization (p. 349):
    StotalR = StotalR + Constants.SMALL_VALUE * Matrix<double>.Build.Dense(imdb.ImageHeight, imdb.ImageHeight);

    // 8) Form the overall scatter matrix with respect to columns:
    var StotalC = SwC.Inverse() * SbC;
    // Regularization (p. 349):
    StotalC = StotalC + Constants.SMALL_VALUE * Matrix<double>.Build.Dense(imdb.ImageWidth, imdb.ImageWidth);

    // 9) Solve the eigenvalue problems:
    var ReigResult = StotalR.Evd();
    var CeigResult = StotalC.Evd();

    // 10) Skip sorting by eigenvalues and the 95% energy cutoff; take only what the mnemonic description specifies.

    // 11) Prepare the matrices for the Karhunen-Loeve transform:
    List<Vector<double>> rVectorList = new List<Vector<double>>();
    for (int i = 0; i < md.trainMartixRightDimension; i++)
    {
        rVectorList.Add(ReigResult.EigenVectors.Column(i));
    }
    // TODO: the values of the R matrices somehow end up on the left. Rename consistently
    // everywhere, or better, double-check the formulas once more:
    var eigMatrixLeft = Matrix<double>.Build.DenseOfColumnVectors(rVectorList.ToArray()).Transpose();

    List<Vector<double>> cVectorList = new List<Vector<double>>();
    for (int i = 0; i < md.trainMartixLeftDimension; i++)
    {
        cVectorList.Add(CeigResult.EigenVectors.Column(i));
    }
    var eigMatrixRight = Matrix<double>.Build.DenseOfColumnVectors(cVectorList.ToArray());

    var averageImageMatrixString = MatrixHelper.convertToMatrixString(Xaverage);
    var leftMatrixString = MatrixHelper.convertToMatrixString(eigMatrixLeft);
    var rightMatrixString = MatrixHelper.convertToMatrixString(eigMatrixRight);
    db.MatrixStrings.Add(averageImageMatrixString);
    db.MatrixStrings.Add(leftMatrixString);
    db.MatrixStrings.Add(rightMatrixString);
    db.SaveChanges();

    var ldaEntity = new LDA
    {
        AverageImageMatrixId = averageImageMatrixString.MatrixStringId,
        LDAId = Guid.NewGuid(),
        LeftMatrixId = leftMatrixString.MatrixStringId,
        RightMatrixId = rightMatrixString.MatrixStringId,
    };
    db.LDAs.Add(ldaEntity);
    db.SaveChanges();

    var frs = new Entities.FaceRecognitionSystem
    {
        FaceRecognitionSystemId = Guid.NewGuid(),
        MnemonicDescription = md.originalDescription,
        Type = "LDA",
        TypeSystemId = ldaEntity.LDAId,
        InputImageHeight = imdb.ImageHeight,
        InputImageWidth = imdb.ImageWidth,
        CreatedDT = DateTime.UtcNow,
    };
    db.FaceRecognitionSystems.Add(frs);
    db.SaveChanges();

    return frs.FaceRecognitionSystemId;
}
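// Hedged sketch: how the three matrices persisted above would be applied at
// recognition time. Two-sided (2D) LDA projects a probe image X as
// Y = L * (X - Xaverage) * R; whether this codebase subtracts the mean here
// or elsewhere is an assumption, and this helper is not part of the trainer above.
private Matrix<double> projectImage(Matrix<double> image, Matrix<double> averageImage,
                                    Matrix<double> eigMatrixLeft, Matrix<double> eigMatrixRight)
{
    return eigMatrixLeft * (image - averageImage) * eigMatrixRight;
}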
public static async Task LDAllocation()
{
    Storage.Current = new OnlineRepositoryStorage(new DiskStorage("catalyst-models"));
    //var train = await Corpus.Reuters.GetAsync();

    string connectionStr = "Data Source = (localdb)\\MSSQLLocalDB; Initial Catalog = master; Integrated Security = True; Connect Timeout = 30; Encrypt = False; TrustServerCertificate = False; ApplicationIntent = ReadWrite; MultiSubnetFailover = False";
    SqlConnection connection = null;
    SqlCommand command = null;
    SqlCommand command1 = null;
    string sqlQuery = "SELECT * FROM TestApplication.dbo.Files where test = 0";
    string sqlQuery1 = "SELECT * FROM TestApplication.dbo.Files where test = 1";
    var nlp = Pipeline.For(Language.English);

    try
    {
        connection = new SqlConnection(connectionStr);
        command = new SqlCommand(sqlQuery, connection);
        command1 = new SqlCommand(sqlQuery1, connection);
        connection.Open();

        var train = new System.Collections.Generic.List<Document>();
        var test = new System.Collections.Generic.List<Document>();
        var reg = new System.Text.RegularExpressions.Regex("/[^/]*/");

        // Collect all training documents first, then train once
        // (the original retrained and stored the model on every row).
        SqlDataReader reader = command.ExecuteReader();
        while (reader.Read())
        {
            NLPFile flTrain = new NLPFile((IDataRecord)reader);
            //Console.WriteLine(flTrain.FetchData());
            var doc = new Document(flTrain.FetchData(), Language.English);
            string label = reg.Matches((string)reader[1])[2].Value;
            label = label.Trim('/');
            doc.Labels.Add(label);
            train.Add(doc);
        }
        reader.Close();

        var trainDocs = nlp.Process(train).ToArray();
        using (var lda = new LDA(Language.English, 0, "reuters-lda"))
        {
            lda.Data.NumberOfTopics = 20; //Arbitrary number of topics
            lda.Train(trainDocs, Environment.ProcessorCount);
            await lda.StoreAsync();
        }

        SqlDataReader reader1 = command1.ExecuteReader();
        while (reader1.Read())
        {
            NLPFile flTest = new NLPFile((IDataRecord)reader1);
            //Console.WriteLine(flTest.FetchData());
            var doc1 = new Document(flTest.FetchData(), Language.English);
            string label = reg.Matches((string)reader1[1])[2].Value; // was reader[1]: that reader is already closed here
            label = label.Trim('/');
            doc1.Labels.Add(label);
            test.Add(doc1);
        }
        reader1.Close();

        var testDocs = nlp.Process(test).ToArray();
        using (var lda = await LDA.FromStoreAsync(Language.English, 0, "reuters-lda"))
        {
            foreach (var doc in testDocs)
            {
                if (lda.TryPredict(doc, out var topics))
                {
                    var docTopics = string.Join("\n", topics.Select(t => lda.TryDescribeTopic(t.TopicID, out var td) ? $"[{t.Score:n3}] => {td.ToString()}" : ""));

                    Console.WriteLine("------------------------------------------");
                    Console.WriteLine(doc.Value);
                    Console.WriteLine("------------------------------------------");
                    Console.WriteLine(docTopics);
                    Console.WriteLine("------------------------------------------\n\n");
                }
            }
        }
    }
    catch (Exception e)
    {
        Console.WriteLine("Error: " + e.Message);
    }
    finally
    {
        command?.Dispose();   // null-safe: the original dereferenced both commands if either was non-null
        command1?.Dispose();
        if (connection != null)
        {
            connection.Close();
            connection.Dispose();
        }
    }
}
public static double[] CalculateBiases<L>(this Sphere<L> sphere, Vector vector, RHCLib.DistanceDelegate measure, IList<L> labels)
{
    SortedDictionary<L, double?> dictClasses = new SortedDictionary<L, double?>();
    foreach (L label in labels)
    {
        dictClasses.Add(label, null);
    }

    double fProportion = 1.0;
    int nCount = dictClasses.Count;
    bool bFirstSphere = true;
    SphereEx<L> sphLDA;

    RHCLib.Sphere<L> sphereIteration = sphere.Recognize(vector, measure, ParallelStrategy.SingleThreaded);
    while (sphereIteration != null && fProportion > 0.0 && nCount > 0)
    {
        if (bFirstSphere)
        {
            if (dictClasses.ContainsKey(sphereIteration.Label))
            {
                if ((sphLDA = sphereIteration as SphereEx<L>) != null && sphLDA.DiscriminantEx != null)
                {
                    #region LDA (Uses LDAEx)

                    // You have four cases you need to watch for...
                    //
                    //          /|\                /|\          1.0
                    //         / | \              / | \
                    //        /  |  \            /  |  \
                    //       /   |   \          /   |   \
                    //      /    |    \        /    |    \
                    //     /     |     \      /     |     \
                    //    /      |      \    /      |      \
                    //   /       |       \  /       |       \
                    //  /        |        \/        |        \
                    // |---------M---------D--------M---------|  0.5
                    //
                    // Equation: 1 - (h)(x_i)   <-- 1 = fProportion because we haven't gotten out yet
                    Func<double, double, double> slope = (x1, x2) => 0.5 / Math.Abs(x2 - x1);

                    //double[][] data = LDA.MatrixFromVector(vector.Features);
                    //double[][] wTx = LDA.MatrixProduct(sphLDA.Discriminant.Transposed, data);

                    // Project the data
                    double proj = Accord.Math.Matrix.Dot(sphLDA.DiscriminantEx.ProjectionVector, vector.Features);
                    if (proj <= sphLDA.DiscriminantEx.ProjectedLeftMean)
                    {
                        dictClasses[sphLDA.Label] = fProportion - (slope(sphLDA.DiscriminantEx.ProjectedLeftMean, sphLDA.DiscriminantEx.ProjectedSetMean - sphLDA.Radius) * (sphLDA.DiscriminantEx.ProjectedLeftMean - proj));
                    }
                    else if (proj > sphLDA.DiscriminantEx.ProjectedLeftMean && proj <= sphLDA.DiscriminantEx.ProjectedSetMean)
                    {
                        dictClasses[sphLDA.Label] = fProportion - (slope(sphLDA.DiscriminantEx.ProjectedSetMean, sphLDA.DiscriminantEx.ProjectedLeftMean) * (proj - sphLDA.DiscriminantEx.ProjectedLeftMean));
                    }
                    else if (proj > sphLDA.DiscriminantEx.ProjectedSetMean && proj <= sphLDA.DiscriminantEx.ProjectedRightMean)
                    {
                        dictClasses[sphLDA.Label] = fProportion - (slope(sphLDA.DiscriminantEx.ProjectedRightMean, sphLDA.DiscriminantEx.ProjectedSetMean) * (sphLDA.DiscriminantEx.ProjectedRightMean - proj));
                    }
                    else
                    {
                        dictClasses[sphLDA.Label] = fProportion - (slope(sphLDA.DiscriminantEx.ProjectedSetMean + sphLDA.Radius, sphLDA.DiscriminantEx.ProjectedRightMean) * (proj - sphLDA.DiscriminantEx.ProjectedRightMean));
                    }

                    #endregion
                }
                else if (sphLDA != null && sphLDA.Discriminant != null)
                {
                    #region Old LDA

                    // You have four cases you need to watch for...
                    //
                    //          /|\                /|\          1.0
                    //         / | \              / | \
                    //        /  |  \            /  |  \
                    //       /   |   \          /   |   \
                    //      /    |    \        /    |    \
                    //     /     |     \      /     |     \
                    //    /      |      \    /      |      \
                    //   /       |       \  /       |       \
                    //  /        |        \/        |        \
                    // |---------M---------D--------M---------|  0.5
                    //
                    // Equation: 1 - h(x_i)
                    Func<double, double, double> slope = (x1, x2) => 0.5 / Math.Abs(x2 - x1);

                    double[][] data = LDA.MatrixFromVector(vector.Features);
                    double[][] wTx = LDA.MatrixProduct(sphLDA.Discriminant.Transposed, data);

                    // Project the data
                    if (wTx[0][0] <= sphLDA.Discriminant.ProjectedMeanLeft)
                    {
                        dictClasses[sphLDA.Label] = fProportion - (slope(sphLDA.Discriminant.ProjectedMeanLeft, 0.0) * (sphLDA.Discriminant.ProjectedMeanLeft - wTx[0][0]));
                    }
                    else if (wTx[0][0] > sphLDA.Discriminant.ProjectedMeanLeft && wTx[0][0] <= sphLDA.Discriminant.DecisionPoint)
                    {
                        dictClasses[sphLDA.Label] = fProportion - (slope(sphLDA.Discriminant.DecisionPoint, sphLDA.Discriminant.ProjectedMeanLeft) * (wTx[0][0] - sphLDA.Discriminant.ProjectedMeanLeft));
                    }
                    else if (wTx[0][0] > sphLDA.Discriminant.DecisionPoint && wTx[0][0] <= sphLDA.Discriminant.ProjectMeanRight)
                    {
                        dictClasses[sphLDA.Label] = fProportion - (slope(sphLDA.Discriminant.ProjectMeanRight, sphLDA.Discriminant.DecisionPoint) * (sphLDA.Discriminant.ProjectMeanRight - wTx[0][0]));
                    }
                    else
                    {
                        dictClasses[sphLDA.Label] = fProportion - (slope(2 * sphLDA.Radius, sphLDA.Discriminant.ProjectMeanRight) * (wTx[0][0] - sphLDA.Discriminant.ProjectMeanRight));
                    }

                    #endregion
                }
                else
                {
                    #region Linear
                    dictClasses[sphereIteration.Label] = fProportion - (measure(sphereIteration, vector) * (0.5 / sphereIteration.Radius));
                    #endregion
                }

                fProportion -= dictClasses[sphereIteration.Label].Value;
                bFirstSphere = false;
                nCount--;
            }
        }
        else
        {
            if (dictClasses.ContainsKey(sphereIteration.Label) && !dictClasses[sphereIteration.Label].HasValue)
            {
                dictClasses[sphereIteration.Label] = fProportion;
                nCount--;
            }

            #region Linear
            // Impossible to have LDA in a node that's not a leaf.
            fProportion -= fProportion * ((sphereIteration.Radius - measure(sphereIteration, vector)) / sphereIteration.Radius);
            #endregion
        }

        sphereIteration = sphereIteration.Parent;
    }

    double fSum = dictClasses.Values.Sum(v => v.HasValue ? v.Value : 0.0);
    return dictClasses.Values.Select(v => v.HasValue ? v.Value / fSum : 0.0).ToArray();
}
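// Hedged sketch: the four-case computation from the LDAEx branch above, factored
// into a standalone function for clarity. The parameters mirror the DiscriminantEx
// members used there, and the function returns the bias assigned to the class
// (the value stored into dictClasses). Purely illustrative, not part of the library.
static double PiecewiseBias(double proj, double leftMean, double setMean,
                            double rightMean, double radius, double proportion)
{
    Func<double, double, double> slope = (x1, x2) => 0.5 / Math.Abs(x2 - x1);
    if (proj <= leftMean)
        return proportion - slope(leftMean, setMean - radius) * (leftMean - proj);
    if (proj <= setMean)
        return proportion - slope(setMean, leftMean) * (proj - leftMean);
    if (proj <= rightMean)
        return proportion - slope(rightMean, setMean) * (rightMean - proj);
    return proportion - slope(setMean + radius, rightMean) * (proj - rightMean);
}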