Example #1
 // Tallies one (label, prediction) pair into the confusion-matrix counters of mlp.
 private static void JudgePerformance(ref MLPerformance mlp, bool label, bool prediction)
 {
     if (label)
     {
         if (prediction)
         {
             mlp.tp++;
         }
         else
         {
             mlp.fn++;
         }
     }
     else
     {
         if (prediction)
         {
             mlp.fp++;
         }
         else
         {
             mlp.tn++;
         }
     }
 }
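The MLPerformance type these examples accumulate into is not shown on this page. A minimal sketch consistent with the calls above might look like the following (field and method names are inferred from usage; the real definition may differ):

 // Assumed shape of MLPerformance: four confusion-matrix counters plus the
 // two derived metrics read by the examples below. Sketched as a struct,
 // which would explain why JudgePerformance takes it by ref.
 public struct MLPerformance
 {
     public int tp, fp, tn, fn;

     // Specificity = TN / (TN + FP): fraction of negatives classified correctly.
     public double getSpecificity() => (double)tn / (tn + fp);

     // Accuracy = (TP + TN) / total predictions.
     public double getAccuracy() => (double)(tp + tn) / (tp + tn + fp + fn);
 }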
Example #2
        // Thresholds each probability and tallies the outcome against the true label.
        private static MLPerformance EvaluateResults(double[] label, double[] probabilities, double threshold)
        {
            MLPerformance mlp = new MLPerformance();

            for (int i = 0; i < label.Length; i++)
            {
                JudgePerformance(ref mlp, (int)label[i] == 1, probabilities[i] >= threshold);
            }
            return mlp;
        }
Example #3
        // Overload taking hard boolean predictions instead of probabilities.
        private static MLPerformance EvaluateResults(double[] label, bool[] predictions)
        {
            MLPerformance mlp = new MLPerformance();

            for (int i = 0; i < label.Length; i++)
            {
                JudgePerformance(ref mlp, (int)label[i] == 1, predictions[i]);
            }
            return mlp;
        }
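A hypothetical call sequence for the two overloads above (sample labels, probabilities, and threshold are made up for illustration):

        double[] labels        = { 1, 0, 1, 0 };
        double[] probabilities = { 0.9, 0.2, 0.4, 0.7 };

        // Example #2 overload: thresholds the probabilities internally.
        MLPerformance fromProbs = EvaluateResults(labels, probabilities, threshold: 0.5);

        // Example #3 overload: takes hard predictions directly.
        bool[] predictions = { true, false, false, true };
        MLPerformance fromPreds = EvaluateResults(labels, predictions);

        // Both give tp=1, tn=1, fp=1, fn=1 here, so accuracy is 0.5.
        Console.WriteLine(fromProbs.getAccuracy());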
Example #4
        // Reloads a saved Accord.NET logistic-regression model and reports its
        // test-set performance at a threshold chosen for 95% training specificity.
        public static void TestLRModel(String lrSavedCoeff, String trainingFile, String testingFile)
        {
            var lr = Loader.LoadAccordNetLogisticRegressionModel(lrSavedCoeff);

            IDataView trainingSet = LoadData(mlContext, trainingFile);
            IDataView testingSet  = LoadData(mlContext, testingFile);

            double[] correctLabels = IDataViewToAccord(testingSet).labels;

            // Derive the decision threshold from the training-set ROC, then score the test set.
            List<ROCDataPoint> AccordROC = ROCCurve(lr, trainingSet);
            double settedThreshold = ThresholdFromROC(AccordROC, minimumSpecificity: 0.95);
            double lrAUC = GetAUCFromROC(AccordROC);

            bool[]        lrOutput         = AccordDecide(lr, testingSet, settedThreshold);
            MLPerformance accordEvaluation = EvaluateResults(correctLabels, lrOutput);

            Console.WriteLine("AUC:{0}\tSpecificity:{1}\tAccuracy:{2}\tThreshold:{3}", lrAUC, accordEvaluation.getSpecificity(), accordEvaluation.getAccuracy(), settedThreshold);
        }
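ThresholdFromROC and ROCDataPoint are helpers from the surrounding project and are not reproduced on this page. Assuming ROCDataPoint exposes the threshold and the specificity measured at that cut point (hypothetical property names), the selection could be sketched as:

        // Sketch only: returns the lowest threshold whose specificity still meets
        // the floor, which maximizes sensitivity subject to that constraint.
        // ROCDataPoint.Threshold / .Specificity are assumed property names.
        private static double ThresholdFromROC(List<ROCDataPoint> roc, double minimumSpecificity)
        {
            double best = 1.0;
            foreach (ROCDataPoint p in roc)
            {
                if (p.Specificity >= minimumSpecificity && p.Threshold < best)
                {
                    best = p.Threshold;
                }
            }
            return best;
        }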
Example #5
        // Trains and compares an Accord.NET logistic regression and an SVM
        // (plus a disabled ML.NET path) on the hard-coded data files below.
        public static void DoJob()
        {
            //String testingFile = InputFileOrganizer.DataRoot + "DCN_TestingSet120_NoDecoy.csv";
            //String trainingFile = InputFileOrganizer.DataRoot + "240minTestingSetDCN_NoDecoy.csv";
            // NOTE: testingFile and trainingFile currently point to the same CSV;
            // swap in the commented-out paths above for a held-out test set.
            String testingFile  = "C:\\Coding\\2019LavalleeLab\\GitProjectRealTimeMS\\TestData\\240minTestingSetDCN_NoDecoy.csv";
            String trainingFile = "C:\\Coding\\2019LavalleeLab\\GitProjectRealTimeMS\\TestData\\240minTestingSetDCN_NoDecoy.csv";


            IDataView trainingSet = LoadData(mlContext, trainingFile);
            IDataView testingSet  = LoadData(mlContext, testingFile);

            sw = new StreamWriter(Path.Combine(InputFileOrganizer.OutputFolderOfTheRun, "LogisticRegressionTrainerOutput.txt"));
            StreamWriter sw2 = new StreamWriter(Path.Combine(InputFileOrganizer.OutputFolderOfTheRun, "AccordWeight.txt"));

            double[] correctLabels = IDataViewToAccord(testingSet).labels;


            var lrAccord = TrainAccordModel(trainingSet);


            Console.WriteLine("Accord weights");
            foreach (double w in lrAccord.Weights)
            {
                Console.WriteLine(w);
                sw2.Write(w + "\t");
            }
            Console.WriteLine("intercept: {0}", lrAccord.Intercept);
            sw2.WriteLine();
            sw2.WriteLine(lrAccord.Intercept);
            sw2.Close();


            Console.WriteLine("--------------------");
            //LogisticRegression lrAccord = new LogisticRegression();
            ////spark weights
            //lrAccord.Weights = new double[] { -0.014101223713988448, 0.40498899120575244, -0.4050931006103277, -0.6514251562095439, -1.4199639211914807, -0.00154170434120518, -0.0017589165180070616, -0.001427050540781882, -0.006890591731651152, 0.23434955458842885, 0.24386505335051745, 0.25265687551174654, 0.34976191542247076, 0.17989186249395828, 0.15598728100439885 };
            //lrAccord.Intercept = -2.0771355924182346;
            List<ROCDataPoint> AccordROC = ROCCurve(lrAccord, trainingSet);
            double lrAUC = GetAUCFromROC(AccordROC);

            Console.WriteLine("AUC= " + lrAUC);
            //ShowAccordIntermediateData(trainingSet, lrAccord);
            double settedThreshold = ThresholdFromROC(AccordROC, minimumSpecificity: 0.95);

            Console.WriteLine("Accord threshold: " + settedThreshold);
            bool[] lrOutput = AccordDecide(lrAccord, testingSet, settedThreshold);
            //bool[] lrOutput = AccordDecide(lrAccord, testingSet,0.3);

            //var kk = EvaluateResults(IDataViewToAccord(trainingSet).labels, AccordDecide(lrAccord, trainingSet, settedThreshold));
            //double AccordTrainingSpecificity = kk.getSpecificity();
            MLPerformance accordEvaluation = EvaluateResults(correctLabels, lrOutput);

            //BuildMLModel(0.95, trainingSet,testingSet);

            // Disabled ML.NET comparison path; kept for reference, never executes.
            if (false)
            {
                var MLNetModel = BuildAndTrainModel(trainingSet);
                //ViewIntermediateData(model.Transform(testingSet));
                IDataView predictions = MLNetModel.Transform(testingSet);
                bool[]    mlOutput    = ExtractSVMPrediction(predictions);
                //CalibratedBinaryClassificationMetrics MLMetrics = GetMetrics(MLNetModel, testingSet);
                MLPerformance mlEvaluation = EvaluateResults(correctLabels, mlOutput);
            }

            SupportVectorMachine<Gaussian> svm;

            //SVMSaveFile = Path.Combine(InputFileOrganizer.DataRoot, "SVMParams_trainedOn240DCN.txt");
            SVMSaveFile = "";
            if (SVMSaveFile.Equals(""))
            {
                SVMSaveFile = Path.Combine(InputFileOrganizer.OutputFolderOfTheRun, "SVMParams.txt");
                svm         = TrainSVMModel(trainingSet);
                Serializer.Save(obj: svm, path: SVMSaveFile);
            }
            else
            {
                svm = Serializer.Load<SupportVectorMachine<Gaussian>>(SVMSaveFile);
            }
            bool[]        svmOutput     = SVMDecide(svm, testingSet);
            MLPerformance svmEvaluation = EvaluateResults(correctLabels, svmOutput);

            Output("Training Set: {0}\nTesting Set: {1}", trainingFile, testingFile);
            Output("Accord\t ML.Net \tSVM:");
            //Console.WriteLine("AUC: {0}\t {1}", lrAUC, MLMetrics.AreaUnderRocCurve);
            Output("AUC: {0}\t {1}\t {2}", lrAUC, 0, 0);

            //Console.WriteLine("Specificity: {0}\t {1}\t {2}", AccordTrainingSpecificity, mlEvaluation.getSpecificity(), -1);
            //Console.WriteLine("Accuracy: {0}\t {1}\t {2}", accordEvaluation.getAccuracy(), mlEvaluation.getAccuracy(),-1);

            Output("Specificity: {0}\t {1}\t {2}", accordEvaluation.getSpecificity(), 0, /* mlEvaluation.getSpecificity(),*/ svmEvaluation.getSpecificity());
            Output("Accuracy: {0}\t {1}\t {2}", accordEvaluation.getAccuracy(), 0, /*mlEvaluation.getAccuracy(), */ svmEvaluation.getAccuracy());
            //model = SetThreshold( model, testingSet);

            //UseModelWithSingleItem(mlContext, model);
            sw.Close();
        }
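GetAUCFromROC is likewise project code not shown on this page. Under the same assumed ROCDataPoint shape (hypothetical Sensitivity/Specificity properties), a standard trapezoidal-rule version would be:

        // Sketch only: integrates sensitivity (TPR) over 1 - specificity (FPR)
        // with the trapezoidal rule, after sorting the curve by FPR.
        // Requires: using System.Linq;
        private static double GetAUCFromROC(List<ROCDataPoint> roc)
        {
            var pts = roc.Select(p => (fpr: 1.0 - p.Specificity, tpr: p.Sensitivity))
                         .OrderBy(p => p.fpr)
                         .ToList();

            double auc = 0;
            for (int i = 1; i < pts.Count; i++)
            {
                auc += (pts[i].fpr - pts[i - 1].fpr) * (pts[i].tpr + pts[i - 1].tpr) / 2.0;
            }
            return auc;
        }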