Example #1
        /// <summary>
        /// Calculates the OA grade from LBP features using the pretrained PCA and regression contained in the given model.
        /// </summary>
        /// <param name="features">MRELBP features.</param>
        /// <param name="mod">Model that includes the PCA eigenvectors, mean feature vector, and regression weights.</param>
        /// <param name="path">Path to save results.</param>
        /// <param name="samplename">Name of the analysed sample. This is saved with the estimated grade to results.csv.</param>
        /// <param name="grade">Estimated OA grade as a string, clamped to the range 0.00–3.00.</param>
        /// <returns>True if the logistic regression classifies the sample as degenerated; otherwise false.</returns>
        public static bool FeaturesToGrade(double[,] features, Model mod, string path, string samplename, out string grade)
        {
            // Centering
            double[,] dataAdjust = Processing.SubtractMean(features, mod.mean);

            // Build the whitened PCA transform: each eigenvector is scaled by the inverse of its singular value
            int w = mod.eigenVectors.GetLength(0);

            double[,] transform = new double[w, mod.nComp];
            for (int i = 0; i < w; i++)
            {
                for (int j = 0; j < mod.nComp; j++)
                {
                    transform[i, j] = mod.eigenVectors[i, j] / mod.singularValues[j];
                }
            }
            double[,] PCA = dataAdjust.Dot(transform);

            // Regression
            double[] grades   = PCA.Dot(mod.weights).Add(mod.intercept);
            double[] logistic = PCA.Dot(mod.weightsLog).Add(mod.interceptLog);
            bool     log;

            if (logistic[0] > 0.5)
            {
                Console.WriteLine("Logistic regression estimated sample as degenerated.");
                log = true;
            }
            else
            {
                Console.WriteLine("Logistic regression estimated sample as healthy / mildly degenerated.");
                log = false;
            }

            // Clamp the estimated grade to [0, 3] and convert it to a string
            if (grades[0] < 0)
            {
                grade = "0.00";
            }
            else if (grades[0] > 3)
            {
                grade = "3.00";
            }
            else
            {
                grade = grades[0].ToString("###0.##", CultureInfo.InvariantCulture);
            }
            return log;
        }
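
A minimal usage sketch, not part of the original source: it assumes the model and MRELBP features have been obtained elsewhere, and the names GradeSingleSample, loadedModel, lbpFeatures and resultPath are illustrative placeholders only.

        // Hypothetical call site for FeaturesToGrade (sketch, not from the original code base).
        public static void GradeSingleSample(Model loadedModel, double[,] lbpFeatures, string resultPath)
        {
            // FeaturesToGrade returns true when the logistic regression flags the sample as degenerated
            bool degenerated = FeaturesToGrade(lbpFeatures, loadedModel, resultPath, "sample_01", out string grade);
            Console.WriteLine("Estimated OA grade: " + grade + (degenerated ? " (degenerated)" : " (healthy / mildly degenerated)"));
        }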
Example #2
        /// <summary>
        /// Calculates the OA grade prediction for a sample volume.
        /// </summary>
        /// <param name="mod">Loaded model.</param>
        /// <param name="features">LBP features (computed and returned by reference).</param>
        /// <param name="volume">Data volume to be analysed.</param>
        /// <returns>Returns a string containing the estimated OA grade.</returns>
        public static string Predict(Model mod, ref int[,] features, ref Rendering.renderPipeLine volume)
        {
            // Default parameters for surface extraction
            int threshold = 50;
            int[] size = { 400, 30 };
            // Alternative smaller values (e.g. for quick testing):
            //int threshold = 5;
            //int[] size = { 10, 3 };

            // Load the default model (the returned state string is not used further here)
            string state = LoadModel(ref mod);

            // Surface extraction
            Processing.SurfaceExtraction(ref volume, threshold, size, out int[,] surfacecoordinates, out byte[,,] surface);

            // Mean and std images
            Processing.MeanAndStd(surface, out double[,] meanImage, out double[,] stdImage);

            // LBP features
            // Save the mean and standard deviation images for inspection (hard-coded path)
            LBPLibrary.Functions.Save(@"C:\Users\sarytky\Desktop\trials\mean.png", meanImage, true);
            LBPLibrary.Functions.Save(@"C:\Users\sarytky\Desktop\trials\std.png", stdImage, true);
            features = LBP(meanImage.Add(stdImage));

            // PCA
            double[,] dataAdjust = Processing.SubtractMean(features.ToDouble());
            double[,] PCA        = dataAdjust.Dot(mod.eigenVectors.ToDouble());

            // Regression (1.5 is a hard-coded offset added to the linear prediction)
            double[] grade = PCA.Dot(mod.weights).Add(1.5);

            //double sum = CompareGrades(grade);

            return("OA grade: " + grade[0].ToString("####.##", CultureInfo.InvariantCulture));
            //return "Sum of differences between pretrained model and actual grade: " + sum.ToString("###.###", CultureInfo.InvariantCulture);
        }
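
A minimal usage sketch, not part of the original source: it assumes a Model instance and a Rendering.renderPipeLine already loaded with the sample volume; RunPrediction, model and pipeline are illustrative placeholders.

        // Hypothetical call site for Predict (sketch, not from the original code base).
        public static void RunPrediction(Model model, Rendering.renderPipeLine pipeline)
        {
            int[,] lbpFeatures = new int[0, 0]; // filled in by Predict via ref
            string result = Predict(model, ref lbpFeatures, ref pipeline);
            Console.WriteLine(result); // prints the formatted grade string, e.g. "OA grade: ..."
        }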