Code Example #1
File: Project.cs  Project: bhrnjica/anndotnet
        /// <summary>
        /// Evaluates the mlconfig stored at mlconfigPath for a single input row stored in the vector array.
        /// </summary>
        /// <param name="mlconfigPath">Path to the mlconfig file.</param>
        /// <param name="vector">Input row values to predict for.</param>
        /// <param name="pdevice">Processing device (CPU or GPU) to run the evaluation on.</param>
        /// <returns>The model prediction for the input row.</returns>
        public static object Predict(string mlconfigPath, float[] vector, ProcessDevice pdevice)
        {
            //device definition
            DeviceDescriptor device = MLFactory.GetDevice(pdevice);

            return(MLEvaluator.TestModel(mlconfigPath, vector, device));
        }
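
A minimal usage sketch for the overload above, assuming a trained mlconfig file already exists; the file path is a placeholder and the feature values are simply reused from Code Example #4, not taken from any real configuration:

        //hypothetical caller; the mlconfig path and input values are placeholders
        //the required using directives/namespaces are assumed and may differ in the real project
        var mlconfigPath = @"C:\models\sample.mlconfig";
        var inputRow = new float[] { 30f, 0f, 106.425f, 1f, 0f, 0f, 1f, 0f };
        //run the prediction on the device resolved from ProcessDevice.Default
        object prediction = Project.Predict(mlconfigPath, inputRow, ProcessDevice.Default);
        Console.WriteLine(prediction);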
Code Example #2
File: Project.cs  Project: bhrnjica/anndotnet
        public static async Task <EvaluationResult> EvaluateMLConfig(string mlconfigPath, DataSetType dsType, EvaluationType evType, ProcessDevice pdevice)
        {
            //device definition
            DeviceDescriptor device = MLFactory.GetDevice(pdevice);

            return(await MLEvaluator.EvaluateMLConfig(mlconfigPath, device, dsType, evType));
        }
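
Because the method is asynchronous, a caller would normally await it; the mlconfig path below is a placeholder, while the enum values are the same ones used in Code Example #8:

        //hypothetical async caller; the path is a placeholder
        var result = await Project.EvaluateMLConfig(@"C:\models\sample.mlconfig",
                         DataSetType.Validation, EvaluationType.ResultExtended, ProcessDevice.Default);
        //the returned EvaluationResult exposes Actual and Predicted values, consumed as in Code Example #8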
Code Example #3
File: MachineLearning.cs  Project: bhrnjica/anndotnet
        /// <summary>
        /// Evaluates the model stored at modelPath for a specific set of input rows.
        /// </summary>
        /// <param name="modelPath">Path to the trained model file.</param>
        /// <param name="rowVector">Input rows to evaluate.</param>
        /// <returns>The predicted values for the supplied rows.</returns>
        public static List <float> Predict(string modelPath, float[][] rowVector)
        {
            //use the default compute device
            var device = DeviceDescriptor.UseDefaultDevice();
            var result = MLEvaluator.TestModel(modelPath, rowVector, device);

            return(result);
        }
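
A short hypothetical call of the batch overload above, assuming the containing class is named MachineLearning as the file name suggests; the model path and the second row are placeholders (the first row reuses the values from Code Example #4):

        //hypothetical caller; the model path and the second row are placeholders
        var rows = new float[][]
        {
            new float[] { 30f, 0f, 106.425f, 1f, 0f, 0f, 1f, 0f },
            new float[] { 45f, 1f, 98.5f, 0f, 1f, 0f, 0f, 1f }
        };
        List<float> predictions = MachineLearning.Predict("../../../data/model_to_evaluate.model", rows);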
Code Example #4
File: Program.cs  Project: zaharPonimash/anndotnet
        private static void runEvaluationText()
        {
            var modelFile = "../../../data/model_to_evaluate.model";
            var inputRow  = new List <float>()
            {
                30f, 0f, 106.425f, 1f, 0f, 0f, 1f, 0f
            };

            var result = MLEvaluator.TestModel(modelFile, inputRow.ToArray(), DeviceDescriptor.UseDefaultDevice());
        }
Code Example #5
 public static (List <List <float> > actual, List <List <float> > predicted) EvaluateFunctionEx(string nnModel, string dataPath, EvaluationParameters evParam, DeviceDescriptor device)
 {
     try
     {
         var fun = Function.Load(nnModel, device);
         //evaluate the loaded function and return the actual and predicted values
         return(MLEvaluator.EvaluateFunctionEx(fun, evParam, device));
     }
     catch (Exception)
     {
         throw;
     }
 }
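
The tuple return value can be deconstructed directly at the call site; the model path, data path, and the default-constructed EvaluationParameters below are assumptions, not values from the project:

     //hypothetical caller; paths and evParam construction are assumptions
     var device = DeviceDescriptor.UseDefaultDevice();
     var evParam = new EvaluationParameters();   //assumed to be configured elsewhere before use
     var (actual, predicted) = EvaluateFunctionEx("model.bin", "data.txt", evParam, device);
     Console.WriteLine($"rows evaluated: {actual.Count}, predictions: {predicted.Count}");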
Code Example #6
 public static Dictionary <string, List <List <float> > > FeatureAndLabels(string nnModel, string dataPath, EvaluationParameters evParam, DeviceDescriptor device)
 {
     try
     {
         var fun = Function.Load(nnModel, device);
         //return the features and labels defined by the evaluation parameters
         return(MLEvaluator.FeaturesAndLabels(fun, evParam, device));
     }
     catch (Exception)
     {
         throw;
     }
 }
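
A sketch of consuming the returned dictionary, where each key is assumed to map a stream name to its rows of values; the paths and the EvaluationParameters instance are placeholders:

     //hypothetical caller; paths and evParam construction are assumptions
     var evParam = new EvaluationParameters();   //assumed to be configured elsewhere before use
     var data = FeatureAndLabels("model.bin", "data.txt", evParam, DeviceDescriptor.UseDefaultDevice());
     foreach (var pair in data)
     {
         //pair.Key is the stream name, pair.Value holds the rows for that stream
         Console.WriteLine($"{pair.Key}: {pair.Value.Count} rows");
     }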
Code Example #7
File: Project.cs  Project: bhrnjica/anndotnet
        /// <summary>
        /// Evaluates the mlconfig stored at mlconfigPath for the images given by their file paths.
        /// </summary>
        /// <param name="mlconfigPath">Path to the mlconfig file.</param>
        /// <param name="imagePaths">Paths of the images to evaluate.</param>
        /// <param name="pdevice">Processing device (CPU or GPU) to run the evaluation on.</param>
        /// <returns>The predicted class index for each image.</returns>
        public static List <int> Predict(string mlconfigPath, string[] imagePaths, ProcessDevice pdevice)
        {
            try
            {
                //device definition
                DeviceDescriptor device = MLFactory.GetDevice(pdevice);

                return(MLEvaluator.TestModel(mlconfigPath, imagePaths, device));
            }
            catch (Exception)
            {
                throw;
            }
        }
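
A hypothetical call of the image overload; the mlconfig path and image paths are placeholders, and each returned value is assumed to be a predicted class index:

            //hypothetical caller; all paths are placeholders
            var images = new string[] { @"C:\images\img1.png", @"C:\images\img2.png" };
            List<int> classIds = Project.Predict(@"C:\models\images.mlconfig", images, ProcessDevice.Default);
            for (int i = 0; i < classIds.Count; i++)
                Console.WriteLine($"{images[i]} -> class {classIds[i]}");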
Code Example #8
        public async Task <ModelEvaluation> EvaluateModel()
        {
            try
            {
                //change application in run mode
                IconUri = "Images/runningmodel.png";
                RaisePropertyChangedEvent("IsRunning");

                //init empty model evaluation
                var mEval = new ModelEvaluation()
                {
                    TrainingValue        = new List <ZedGraph.PointPair>(),
                    ValidationValue      = new List <ZedGraph.PointPair>(),
                    ModelValueTraining   = new List <ZedGraph.PointPair>(),
                    ModelValueValidation = new List <ZedGraph.PointPair>(),
                    Classes        = new List <string>(),
                    ModelOutputDim = 1
                };
                var mpt = new ModelPerformance();
                mpt.DatSetName = "Training set";
                var mpv = new ModelPerformance();
                mpv.DatSetName = "Validation set";


                //check if the trained model exists
                if (string.IsNullOrEmpty(TrainingParameters.LastBestModel) || string.IsNullOrEmpty(TrainingParameters.LastBestModel.Trim(' ')))
                {
                    await Application.Current.Dispatcher.BeginInvoke(
                        DispatcherPriority.Background,
                        new Action(
                            () => MainWindow.SetCursor(false)
                            ));


                    return(mEval);
                }


                //get model full path
                var modelMLPath = Project.GetMLConfigPath(Settings, Name);
                //check if file exists
                var fi = new FileInfo(modelMLPath);
                if (!fi.Exists)
                {
                    return(mEval);
                }
                //evaluate model against training data
                var task1 = await Task.Run(() => Project.EvaluateMLConfig(modelMLPath, DataSetType.Training, EvaluationType.ResultExtended, ProcessDevice.Default));

                var resultTrain = task1;

                //evaluate model against validation data
                var task2 = await Task.Run(() => Project.EvaluateMLConfig(modelMLPath, DataSetType.Validation, EvaluationType.ResultExtended, ProcessDevice.Default));

                var resultValidation = task2;

                //return the empty evaluation if there are no training results to show
                if (resultTrain.Actual == null || resultTrain.Actual.Count <= 0)
                {
                    return(mEval);
                }

                //prepare the evaluation result for the training set
                for (int i = 0; i < resultTrain.Actual.Count(); i++)
                {
                    mEval.TrainingValue.Add(new PointPair(i + 1, resultTrain.Actual[i]));
                }

                for (int i = 0; i < resultTrain.Predicted.Count(); i++)
                {
                    mEval.ModelValueTraining.Add(new PointPair(i + 1, resultTrain.Predicted[i]));
                }

                //add validation results only if a validation set is defined
                if (resultValidation.Actual != null && resultValidation.Actual.Count > 0)
                {
                    for (int i = 0; i < resultValidation.Actual.Count(); i++)
                    {
                        mEval.ValidationValue.Add(new PointPair(i + 1, resultValidation.Actual[i]));
                    }


                    for (int i = 0; i < resultValidation.Predicted.Count(); i++)
                    {
                        mEval.ModelValueValidation.Add(new PointPair(i + 1, resultValidation.Predicted[i]));
                    }
                }

                //set output classes and the model output dimension
                mEval.Classes        = resultTrain.OutputClasses;
                mEval.ModelOutputDim = resultTrain.OutputClasses == null ? 1 : resultTrain.OutputClasses.Count;

                //training performance result
                mpt = MLEvaluator.CalculatePerformance(resultTrain, "Training set");
                //validation performance result
                mpv = MLEvaluator.CalculatePerformance(resultValidation, "Validation set");


                mEval.TrainPerformance = mpt;
                if (mEval.Classes != null)
                {
                    mEval.TrainPerformance.Classes = mEval.Classes.ToArray();
                }
                mEval.ValidationPerformance = mpv;
                if (mEval.Classes != null)
                {
                    mEval.ValidationPerformance.Classes = mEval.Classes.ToArray();
                }
                ModelEvaluation = mEval;

                return(mEval);
            }
            catch (Exception)
            {
                throw;
            }
            finally
            {
                //change application in normal mode
                IconUri = "Images/model.png";
                RaisePropertyChangedEvent("IsRunning");
            }
        }
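
Since the method is awaitable and stores the result in ModelEvaluation on success, a caller on the UI side could look roughly like this; viewModel is a hypothetical instance of the class that defines EvaluateModel and is not shown in the snippet above:

        //hypothetical caller; viewModel is an assumption
        var evaluation = await viewModel.EvaluateModel();
        //TrainPerformance and ValidationPerformance carry the metrics computed above
        Console.WriteLine(evaluation.TrainPerformance?.DatSetName);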