Example #1
        public static (MLFactory f, LearningParameters lrData, TrainingParameters trData, Function nnModel, MinibatchSourceEx mbs) PrepareNNData(Dictionary <string, string> dicMParameters, CreateCustomModel customModel, DeviceDescriptor device)
        {
            try
            {
                //create factory object
                MLFactory f = CreateMLFactory(dicMParameters);

                //create learning params
                var strLearning           = dicMParameters["learning"];
                LearningParameters lrData = MLFactory.CreateLearningParameters(strLearning);

                //create training param
                var strTraining           = dicMParameters["training"];
                TrainingParameters trData = MLFactory.CreateTrainingParameters(strTraining);

                //set model component locations
                var dicPath = MLFactory.GetMLConfigComponentPaths(dicMParameters["paths"]);
                //
                trData.ModelTempLocation  = $"{dicMParameters["root"]}\\{dicPath["TempModels"]}";
                trData.ModelFinalLocation = $"{dicMParameters["root"]}\\{dicPath["Models"]}";
                var strTrainPath = $"{dicMParameters["root"]}\\{dicPath["Training"]}";

                var strValidPath = (string.IsNullOrEmpty(dicPath["Validation"]) || dicPath["Validation"] == " ") ? "":
                                   $"{dicMParameters["root"]}\\{dicPath["Validation"]}";

                //data normalization in case the option is enabled
                //check if network contains Normalization layer and assign value to normalization parameter
                if (dicMParameters["network"].Contains("Normalization"))
                {
                    trData.Normalization = new string[] { MLFactory.m_NumFeaturesGroupName };
                }

                //perform data normalization according to the normalization parameter
                List <Variable> networkInput = NormalizeInputLayer(trData, f, strTrainPath, strValidPath, device);

                //create network model
                Function nnModel = CreateNetworkModel(dicMParameters["network"], networkInput, f.OutputVariables, customModel, device);

                //create minibatch source
                var mbs = new MinibatchSourceEx(trData.Type, f.StreamConfigurations.ToArray(), strTrainPath, strValidPath, MinibatchSource.InfinitelyRepeat, trData.RandomizeBatch);

                //return ml parameters
                return(f, lrData, trData, nnModel, mbs);
            }
            catch (Exception)
            {
                throw;
            }
        }
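
For orientation, here is a minimal, assumed usage sketch of PrepareNNData. The mlconfig path is hypothetical, the configuration-loading calls are the same ones used in Example #2 below, and passing null for the custom model delegate is an assumption, not something documented here.

        //assumed usage sketch - not part of the original project
        var mlConfigPath = @"C:\models\sample.mlconfig";                   //hypothetical path
        var parameters   = MLFactory.LoadMLConfiguration(mlConfigPath);
        parameters.Add("root", MLFactory.GetMLConfigFolder(mlConfigPath));

        var device = DeviceDescriptor.UseDefaultDevice();                  //CNTK default device
        var (f, lrData, trData, nnModel, mbs) = PrepareNNData(parameters, null /*no custom model - assumption*/, device);
        //f       - MLFactory holding stream configurations and input/output variables
        //lrData  - learning (optimizer) parameters
        //trData  - training parameters, including temp and final model locations
        //nnModel - the constructed CNTK Function
        //mbs     - minibatch source that feeds the training loop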
Example #2
        public static Tuple <bool, bool, bool> GetDataSetAviability(string mlConfigPath)
        {
            try
            {
                //Load ML model configuration file
                var dicMParameters = MLFactory.LoadMLConfiguration(mlConfigPath);
                //add full path of model folder since the model file doesn't contain any absolute path
                dicMParameters.Add("root", MLFactory.GetMLConfigFolder(mlConfigPath));

                //get model data paths
                var dicPath    = MLFactory.GetMLConfigComponentPaths(dicMParameters["paths"]);
                var training   = dicPath["Training"] != " ";
                var validation = dicPath["Validation"] != " ";
                var test       = dicPath["Test"] != " ";
                return(new Tuple <bool, bool, bool>(training, validation, test));
            }
            catch (Exception)
            {
                throw;
            }
        }
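
A short, assumed sketch of how the availability flags might be consumed; the mlconfig path is hypothetical, and the tuple items correspond to training, validation and test, in that order.

        //assumed usage sketch - not part of the original project
        var available = GetDataSetAviability(@"C:\models\sample.mlconfig");   //hypothetical path
        Console.WriteLine($"Training: {available.Item1}, Validation: {available.Item2}, Test: {available.Item3}");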
Example #3
        /// <summary>
        /// Evaluate model defined in the mlconfig file
        /// </summary>
        /// <param name="mlconfigPath"></param>
        /// <param name="device"></param>
        /// <returns></returns>
        public static async Task <EvaluationResult> EvaluateMLConfig(string mlconfigPath, DeviceDescriptor device, DataSetType dsType, EvaluationType evType)
        {
            try
            {
                //define eval result
                var er = new EvaluationResult();
                er.OutputClasses = new List <string>()
                {
                    ""
                };
                er.Actual    = new List <float>();
                er.Predicted = new List <float>();
                er.Header    = new List <string>();

                //Load ML configuration file
                var dicMParameters = MLFactory.LoadMLConfiguration(mlconfigPath);
                //add full path of model folder since the model file doesn't contain any absolute path
                dicMParameters.Add("root", MLFactory.GetMLConfigFolder(mlconfigPath));

                // get model data paths
                var dicPath = MLFactory.GetMLConfigComponentPaths(dicMParameters["paths"]);
                //parse training parameters to locate the trained model
                var projectValues = dicMParameters["training"].Split(MLFactory.m_cntkSpearator, StringSplitOptions.RemoveEmptyEntries);
                var modelName     = MLFactory.GetParameterValue(projectValues, "TrainedModel");
                var nnModelPath   = Path.Combine(dicMParameters["root"], modelName);
                //check if model exists
                if (!MLFactory.IsFileExist(nnModelPath))
                {
                    return(er);
                }
                //
                var dataset = MLFactory.GetDataPath(dicMParameters, dsType);
                if (string.IsNullOrEmpty(dataset) || dataset == " ")
                {
                    if (dsType == DataSetType.Testing)
                    {
                        dataset = MLFactory.GetDataPath(dicMParameters, DataSetType.Validation);
                    }
                    if (string.IsNullOrEmpty(dataset) || dataset == " ")
                    {
                        return(er);
                    }
                }


                //get output classes in case the ml problem is classification
                var strCls = dicMParameters.ContainsKey("metadata") ? dicMParameters["metadata"] : "";
                var oc     = MLFactory.GetOutputClasses(strCls);
                if (oc != null)
                {
                    er.OutputClasses = oc;
                }

                //Minibatch
                var           mbTypestr = MLFactory.GetParameterValue(projectValues, "Type");
                MinibatchType mbType    = (MinibatchType)Enum.Parse(typeof(MinibatchType), mbTypestr, true);
                var           mbSizetr  = MLFactory.GetParameterValue(projectValues, "BatchSize");

                var mf = MLFactory.CreateMLFactory(dicMParameters);


                //perform evaluation
                var evParams = new EvaluationParameters()
                {
                    MinibatchSize = uint.Parse(mbSizetr),
                    MBSource      = new MinibatchSourceEx(mbType, mf.StreamConfigurations.ToArray(), mf.InputVariables, mf.OutputVariables, dataset, null, MinibatchSource.FullDataSweep, false, 0),
                    Input         = mf.InputVariables,
                    Ouptut        = mf.OutputVariables,
                };

                //evaluate model
                if (evType == EvaluationType.FeaturesOnly)
                {
                    if (!dicMParameters.ContainsKey("metadata"))
                    {
                        throw new Exception("The result cannot be exported to Excel, since no metadata is stored in mlconfig file.");
                    }

                    var desc = MLFactory.ParseRawDataSet(dicMParameters["metadata"]);
                    er.Header = MLFactory.GenerateHeader(desc);
                    var fun = Function.Load(nnModelPath, device);
                    //
                    er.DataSet = await Task.Run(() => MLEvaluator.FeaturesAndLabels(fun, evParams, device));

                    return(er);
                }
                else if (evType == EvaluationType.Results)
                {
                    //define header
                    er.Header.Add(evParams.Ouptut.First().Name + "_actual");
                    er.Header.Add(evParams.Ouptut.First().Name + "_predicted");

                    var fun = Function.Load(nnModelPath, device);
                    //
                    var result = await Task.Run(() => MLEvaluator.EvaluateFunction(fun, evParams, device));

                    er.Actual    = result.actual.ToList();
                    er.Predicted = result.predicted.ToList();

                    if (er.OutputClasses.Count < 2 && evParams.Ouptut.First().Shape.Dimensions.Last() > 1)
                    {
                        var result1 = await Task.Run(() => MLEvaluator.EvaluateFunctionEx(fun, evParams, device));

                        er.ActualEx    = result1.actual;
                        er.PredictedEx = result1.predicted;
                    }
                    return(er);
                }
                else if (evType == EvaluationType.ResultExtended)
                {
                    //define header
                    er.Header.Add(evParams.Ouptut.First().Name + "_actual");
                    er.Header.Add(evParams.Ouptut.First().Name + "_predicted");
                    er.Actual      = new List <float>();
                    er.Predicted   = new List <float>();
                    er.ActualEx    = new List <List <float> >();
                    er.PredictedEx = new List <List <float> >();

                    //
                    var fun      = Function.Load(nnModelPath, device);
                    var resultEx = await Task.Run(() => MLEvaluator.EvaluateFunctionEx(fun, evParams, device));

                    //var resultEx = EvaluateFunctionEx(nnModelPath, dataPath, evParams, device);
                    for (int i = 0; i < resultEx.actual.Count(); i++)
                    {
                        var res1 = MLValue.GetResult(resultEx.actual[i]);
                        er.Actual.Add(res1);
                        var res2 = MLValue.GetResult(resultEx.predicted[i]);
                        er.Predicted.Add(res2);
                    }
                    er.ActualEx    = resultEx.actual;
                    er.PredictedEx = resultEx.predicted;

                    return(er);
                }
                else
                {
                    throw new Exception("Unknown evaluation type!");
                }
            }
            catch (Exception)
            {
                throw;
            }
        }
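
Finally, an assumed sketch of invoking EvaluateMLConfig on the test set and reading back the results; the mlconfig path is hypothetical, while DataSetType.Testing and EvaluationType.Results are the enum members already referenced in the example above. The call must run inside an async method because EvaluateMLConfig returns a Task.

        //assumed usage sketch - not part of the original project; requires an async context
        var device = DeviceDescriptor.UseDefaultDevice();
        var er = await EvaluateMLConfig(@"C:\models\sample.mlconfig", device, DataSetType.Testing, EvaluationType.Results);
        for (int i = 0; i < er.Actual.Count; i++)
            Console.WriteLine($"{er.Actual[i]}\t{er.Predicted[i]}");    //actual vs. predicted value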