Example #1
        /// <summary>
        /// Parses the string and creates a list of the defined mlconfigs
        /// </summary>
        /// <param name="projectMetaData"></param>
        /// <returns></returns>
        public static List <string> GetMLConfigs(string projectMetaData)
        {
            //parse feature variables
            var dataValues = projectMetaData.Split(MLFactory.m_cntkSpearator, StringSplitOptions.RemoveEmptyEntries);

            var mlconfigs = new List <string>();
            //parse mlconfigs
            var strData = MLFactory.GetParameterValue(dataValues, "MLConfigs");

            if (string.IsNullOrEmpty(strData))
            {
                return(mlconfigs);
            }
            else
            {
                var mod = strData.Split(MLFactory.m_ValueSpearator, StringSplitOptions.RemoveEmptyEntries);
                mlconfigs = mod.ToList();
            }

            return(mlconfigs);
        }
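A minimal usage sketch for the snippet above (the metadata string is hypothetical; the separator characters behind MLFactory.m_cntkSpearator and MLFactory.m_ValueSpearator are assumed here to be '|' and ';'):

        //hypothetical project metadata, format assumed from the parsing code above
        string projectMetaData = "|MLConfigs:MLConfig1;MLConfig2";
        List<string> configs = GetMLConfigs(projectMetaData);
        //under those assumptions, configs contains "MLConfig1" and "MLConfig2"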
Example #2
        public void loadMLConfigFile_Test01()
        {
            var strPath = "..\\..\\..\\..\\data\\ml_config_file_test.txt";

            //
            var dicMParameters = MLFactory.LoadMLConfiguration(strPath);

            Assert.Equal(8, dicMParameters.Count);

            //List of ml config keywords
            Assert.True(dicMParameters.ContainsKey("features"));
            Assert.True(dicMParameters.ContainsKey("labels"));

            Assert.True(dicMParameters.ContainsKey("network"));
            Assert.True(dicMParameters.ContainsKey("learning"));
            Assert.True(dicMParameters.ContainsKey("training"));

            Assert.True(dicMParameters.ContainsKey("configid"));
            Assert.True(dicMParameters.ContainsKey("metadata"));
            Assert.True(dicMParameters.ContainsKey("paths"));
        }
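A minimal sketch of consuming the dictionary returned by MLFactory.LoadMLConfiguration (the file path is reused from the test; the "BatchSize" parameter name and the separator field are taken from the other examples on this page):

        var dicMParameters = MLFactory.LoadMLConfiguration("..\\..\\..\\..\\data\\ml_config_file_test.txt");
        //read a single value from the "training" section with the parameter helpers shown elsewhere on this page
        var trainingValues = dicMParameters["training"].Split(MLFactory.m_cntkSpearator, StringSplitOptions.RemoveEmptyEntries);
        var batchSize = MLFactory.GetParameterValue(trainingValues, "BatchSize");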
Example #3
        /// <summary>
        /// Returns the full paths of the model components specified by name
        /// </summary>
        /// <param name="settings"></param>
        /// <param name="mlconfigName"></param>
        /// <returns></returns>
        public static Dictionary <string, string> GetDefaultMLConfigPaths(ProjectSettings settings, string mlconfigName)
        {
            try
            {
                Dictionary <string, string> strMlCOnfig = new Dictionary <string, string>();

                var validPath = settings.ValidationSetCount > 0 ? MLFactory.GetDefaultMLConfigDatSetPath(false) : "";
                //
                var strPaths = $"|Training:{MLFactory.GetDefaultMLConfigDatSetPath(true)} " +
                               $"|Validation:{validPath} " +
                               $"|Test:{MLFactory.GetDefaultMLConfigDatSetPath(false)} " +
                               $"|TempModels:{MLFactory.m_MLTempModelFolder} |Models:{MLFactory.m_MLModelFolder} " +
                               $"|Result:{mlconfigName}_result.csv |Logs:{MLFactory.m_MLLogFolder} ";

                //
                strMlCOnfig.Add("paths", strPaths);
                return(strMlCOnfig);
            }
            catch (Exception)
            {
                throw;
            }
        }
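A sketch of building the default paths entry and parsing it back into its components (the ProjectSettings initializer is an assumption; MLFactory.GetMLConfigComponentPaths is used the same way in the evaluation examples below):

        var settings = new ProjectSettings() { ValidationSetCount = 0 }; //assumed initializer
        var cfg = GetDefaultMLConfigPaths(settings, "MLConfig1");
        var dicPath = MLFactory.GetMLConfigComponentPaths(cfg["paths"]);
        //dicPath is expected to contain keys such as "Training", "Validation", "Test", "TempModels", "Models", "Result" and "Logs"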
Example #4
        public void NormalizationfeatureGroup_test03()
        {
            DeviceDescriptor device = DeviceDescriptor.UseDefaultDevice();
            //create factory object
            MLFactory f = new MLFactory();

            //create config streams
            f.CreateIOVariables("|Itemid 1 0 |Sales 4 0 |Color 1 0", "|Label 1 0", DataType.Float);
            var trData = MLFactory.CreateTrainingParameters("|Type: default |BatchSize: 130 |Epochs:5 |Normalization:Sales |SaveWhileTraining: 0 |RandomizeBatch: 0 |ProgressFrequency: 1");

            string trainingPath            = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\cntk_dataset_for_normalization_test.txt";
            string trainingNormalizedPathh = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\cntk_dataset_for_normalization_test_result.txt";

            //string trainingPath = "../../../../data/cntk_dataset_for_normalization_test.txt";
            //string trainingNormalizedPathh = "../../../../data/cntk_dataset_for_normalization_test_result.txt";

            var strTrainData     = System.IO.File.ReadAllLines(trainingPath);
            var normalizedResult = System.IO.File.ReadAllLines(trainingNormalizedPathh);
            var inputVars        = MLFactory.NormalizeInputLayer(trData, f, trainingPath, trainingPath, device);

            //normalization test for train dataset
            using (var mbs1 = new MinibatchSourceEx(trData.Type, f.StreamConfigurations.ToArray(), trainingPath, trainingPath, MinibatchSource.FullDataSweep, trData.RandomizeBatch))
            {
                var data = mbs1.GetNextMinibatch(10, device);

                //go through all functions and perform the calculation
                for (int i = 0; i < inputVars.Count; i++)
                {
                    //
                    var fun     = (Function)inputVars[i];
                    var strName = data.Keys.Where(x => x.m_name.Equals(f.InputVariables[i].Name)).FirstOrDefault();
                    var input   = new Dictionary <Variable, Value>()
                    {
                        { f.InputVariables[i], data[strName].data }
                    };

                    var output = new Dictionary <Variable, Value>()
                    {
                        { fun, null }
                    };
                    //
                    fun.Evaluate(input, output, device);
                    var inputValues      = data[strName].data.GetDenseData <float>(fun).Select(x => x[0]).ToList();
                    var normalizedValues = output[fun].GetDenseData <float>(fun).Select(x => x[0]).ToList();
                    int index            = 0;
                    if (i < 2)
                    {
                        index = i;
                    }
                    else
                    {
                        index = i + 3;
                    }
                    var currNorLine = normalizedResult[index].Split(new char[] { '\t', ' ' }).ToList();

                    for (int j = 0; j < normalizedValues.Count; j++)
                    {
                        var n1 = normalizedValues[j].ToString(CultureInfo.InvariantCulture);
                        var n2 = currNorLine[j];
                        if (n1.Length < 2)
                        {
                            Assert.Equal(n1, n2);
                        }
                        else
                        {
                            Assert.Equal(n1.Substring(0, 5), n2.Substring(0, 5));
                        }
                    }
                }
            }
        }
Example #5
        public void gaussNormalization_test01()
        {
            DeviceDescriptor device = DeviceDescriptor.UseDefaultDevice();
            //create factory object
            MLFactory f = new MLFactory();

            //create config streams
            f.CreateIOVariables("feature 4 0", "flower 3 0", DataType.Float);
            var trData = MLFactory.CreateTrainingParameters("|Type: default |BatchSize: 130 |Epochs:5 |Normalization: 0 |SaveWhileTraining: 0 |RandomizeBatch: 0 |ProgressFrequency: 1");

            string trainingPath   = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\iris_with_hot_vector.txt";
            string validationPath = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\iris_with_hot_vector_test.txt";
            //string trainingPath = "../../../../data/iris_with_hot_vector.txt";
            //string validationPath = "../../../../data/iris_with_hot_vector_test.txt";

            //string trainingNormalizedPathh = "../../../../data/iris_train_normalized.txt";
            string trainingNormalizedPathh = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\iris_train_normalized.txt";
            var    strNormalizedLine       = System.IO.File.ReadAllLines(trainingNormalizedPathh);

            string validationNormalizedPath = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\iris_valid_normalized.txt";
            //string validationNormalizedPath = "../../../../data/iris_valid_normalized.txt";
            var strValidNormalizedLine = System.IO.File.ReadAllLines(validationNormalizedPath);
            //
            List <Function> normalizedInputs = null;

            using (var mbs1 = new MinibatchSourceEx(trData.Type, f.StreamConfigurations.ToArray(), trainingPath, validationPath, MinibatchSource.FullDataSweep, trData.RandomizeBatch))
            {
                normalizedInputs = mbs1.NormalizeInput(f.InputVariables, device);
            }

            //normalization test for train dataset
            using (var mbs1 = new MinibatchSourceEx(trData.Type, f.StreamConfigurations.ToArray(), trainingPath, validationPath, MinibatchSource.FullDataSweep, trData.RandomizeBatch))
            {
                var data = mbs1.GetNextMinibatch(130, device);

                //go through all functions and perform the calculation
                foreach (var fun in normalizedInputs)
                {
                    //
                    var input = new Dictionary <Variable, Value>()
                    {
                        { f.InputVariables.First(), data.First().Value.data }
                    };

                    var output = new Dictionary <Variable, Value>()
                    {
                        { fun, null }
                    };
                    //
                    fun.Evaluate(input, output, device);

                    var normalizedValues = output[fun].GetDenseData <float>(fun);

                    for (int i = 0; i < normalizedValues.Count; i++)
                    {
                        var currNorLine = strNormalizedLine[i].Split('\t').ToList();
                        for (int j = 0; j < normalizedValues[0].Count(); j++)
                        {
                            var n1 = normalizedValues[i][j].ToString(CultureInfo.InvariantCulture).Substring(0, 5);
                            var n2 = currNorLine[j].Substring(0, 5);
                            Assert.Equal(n1, n2);
                        }
                    }
                }
            }

            using (var mbs1 = new MinibatchSourceEx(trData.Type, f.StreamConfigurations.ToArray(), trainingPath, validationPath, MinibatchSource.FullDataSweep, trData.RandomizeBatch))
            {
                var data = MinibatchSourceEx.GetFullBatch(mbs1.Type, mbs1.ValidationDataFile, mbs1.StreamConfigurations, device);

                //go through all functions and perform the calculation
                foreach (var fun in normalizedInputs)
                {
                    //
                    var input = new Dictionary <Variable, Value>()
                    {
                        { f.InputVariables.First(), data.First().Value.data }
                    };

                    var output = new Dictionary <Variable, Value>()
                    {
                        { fun, null }
                    };
                    //
                    fun.Evaluate(input, output, device);

                    var normalizedValues = output[fun].GetDenseData <float>(fun);

                    for (int i = 0; i < normalizedValues.Count; i++)
                    {
                        var currNorLine = strValidNormalizedLine[i].Split('\t').ToList();
                        for (int j = 0; j < normalizedValues[0].Count(); j++)
                        {
                            var n1 = normalizedValues[i][j].ToString(CultureInfo.InvariantCulture).Substring(0, 5);
                            var n2 = currNorLine[j].Substring(0, 5);
                            Assert.Equal(n1, n2);
                        }
                    }
                }
            }
        }
Example #6
        public void networkConfiguration_test08()
        {
            MLFactory f = new MLFactory();
            //LSTM Network  in(4)-LSTM(5,5)-out(3), with peepholes and stabilization
            List <NNLayer> layers = new List <NNLayer>()
            {
                new NNLayer()
                {
                    Type = LayerType.LSTM, Param1 = 5, Param2 = 5, FParam = Activation.TanH, BParam2 = true, BParam1 = true, Id = 1, Name = "LSTM Layer"
                },
                new NNLayer()
                {
                    Type = LayerType.Dense, Param1 = 3, FParam = Activation.None, Id = 2, Name = "out1"
                },
            };

            //create input and output variable
            var      device     = DeviceDescriptor.UseDefaultDevice();
            Variable featureVar = Variable.InputVariable(new int[] { 4 }, DataType.Float, "in1");
            Variable labelVar   = Variable.InputVariable(new int[] { 3 }, DataType.Float, "out1");


            var nnModel = MLFactory.CreateNetwrok(layers, new List <Variable>()
            {
                { featureVar }
            }, labelVar, device);

            //Structure of the network  parameters
            var nnparams = nnModel.Inputs.Where(p => p.Uid.StartsWith("Parameter")).ToList();

            //weights
            var w = nnparams.Where(p => p.Name.Equals("w")).ToList();

            Assert.Equal(5, w.Count);//4 in the LSTM cell and 1 in the output dense layer

            // total weights: 4 x (5x4) in the LSTM + 5x3 in the output layer = 95
            Assert.Equal(95, w.Sum(p => p.Shape.TotalSize));
            //total biases
            var b = nnparams.Where(p => p.Name.Equals("b")).ToList();

            Assert.Equal(5, b.Count);//4 in the LSTM cell and 1 in the output layer
            //4x5 in the LSTM and 3 in the output layer = 23
            Assert.Equal(23, b.Sum(p => p.Shape.TotalSize));

            //recurrent weights: 4 x (5x5) in the LSTM
            var u = nnparams.Where(p => p.Name.Equals("u")).ToList();

            Assert.Equal(4, u.Count);//4 in lstm
            //4*5*5 in lstm
            Assert.Equal(100, u.Sum(p => p.Shape.TotalSize));

            //peephole only in LSTM.
            var peep = nnparams.Where(p => p.Name.Equals("pe")).ToList();

            //Peep connection in 3 gates ft, it and ot
            Assert.Equal(3, peep.Count);
            //3*5
            Assert.Equal(15, peep.Sum(p => p.Shape.TotalSize));

            //stabilization on the 3 gates (ft, it and ot), plus 3 more when peepholes are used
            var stab = nnparams.Where(p => p.Name.Equals("st")).ToList();

            //for peephole lstm count is 3+3
            Assert.Equal(6, stab.Count);
            //6x1
            Assert.Equal(6, stab.Sum(p => p.Shape.TotalSize));
            //constant: 6x3 +1x3
            var constants = nnModel.Inputs.Where(p => p.Uid.StartsWith("Constant")).ToList();

            Assert.Equal(21, constants.Count);
            var variables = nnModel.Inputs.Where(p => p.Name.StartsWith("in1")).ToList();
            var outVars   = nnModel.Outputs.ToList();

            //check first and last variable
            Assert.Equal("in1", nnModel.Arguments[0].Name);
            Assert.Equal("out1", nnModel.Outputs[0].Name);
            Assert.Equal(3, nnModel.Output.Shape.Dimensions[0]);
        }
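The asserted parameter counts above can be reproduced with a little arithmetic; the sketch below only mirrors the comments in the test and is illustrative, not part of the API:

        //in(4)-LSTM(5,5)-Dense(3) with peepholes and stabilization
        int inDim = 4, cellDim = 5, outDim = 3, gates = 4;
        int w  = gates * cellDim * inDim + cellDim * outDim; //4*(5x4) + 5x3 = 95
        int u  = gates * cellDim * cellDim;                  //4*(5x5) = 100
        int b  = gates * cellDim + outDim;                   //4*5 + 3 = 23
        int pe = 3 * cellDim;                                //peepholes on ft, it and ot = 15
        int st = 3 + 3;                                      //stabilizers: 3 gates + 3 peephole = 6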
Example #7
        public void networkConfiguration_test01()
        {
            MLFactory f = new MLFactory();

            List <NNLayer> layers = new List <NNLayer>()
            {
                new NNLayer()
                {
                    Type = LayerType.Dense, Param1 = 5, FParam = Activation.TanH, Id = 1, Name = "Dense Layer"
                },
                new NNLayer()
                {
                    Type = LayerType.Dense, Param1 = 1, FParam = Activation.None, Id = 2, Name = "out1"
                },
            };
            //create input and output variable
            var      device     = DeviceDescriptor.UseDefaultDevice();
            Variable featureVar = Variable.InputVariable(new int[] { 4 }, DataType.Float, "in1");
            Variable labelVar   = Variable.InputVariable(new int[] { 1 }, DataType.Float, "out1");

            var nnModel = MLFactory.CreateNetwrok(layers, new List <Variable>()
            {
                { featureVar }
            }, labelVar, device);

            //Structure of the network  parameters
            var nnparams = nnModel.Inputs.Where(p => p.Uid.StartsWith("Parameter")).ToList();
            //weights
            var w = nnparams.Where(p => p.Name.Equals("w")).ToList();

            Assert.Equal(2, w.Count);//2 = 1 in hidden and 1 in out layer

            // total weights: 4x5 in the hidden layer + 5x1 in the output layer = 25
            Assert.Equal(25, w.Sum(p => p.Shape.TotalSize));

            //total biases
            var b = nnparams.Where(p => p.Name.Equals("b")).ToList();

            Assert.Equal(2, b.Count);//2 = 1 in hidden and 1 in out layer

            //1x5 + 1x1 = 6
            Assert.Equal(6, b.Sum(p => p.Shape.TotalSize));


            //last parameter is related to network output
            var outputLayer = nnModel.Outputs.Where(p => p.Name.Equals(labelVar.Name)).ToList();

            Assert.Single(outputLayer);
            //dimension is 1
            Assert.Equal(1, outputLayer.Sum(p => p.Shape.TotalSize));


            var constants = nnModel.Inputs.Where(p => p.Uid.StartsWith("Constant")).ToList();

            Assert.Empty(constants);//no constants, since the network contains no LSTM (peephole) layers
            var variables = nnModel.Inputs.Where(p => p.Name.StartsWith("in1")).ToList();
            var outVars   = nnModel.Outputs.ToList();

            //check first and last variable
            Assert.Equal("in1", nnModel.Arguments[0].Name);
            Assert.Equal("out1", nnModel.Outputs[0].Name);
            Assert.Equal(1, nnModel.Output.Shape.Dimensions[0]);
        }
Example #8
        public void networkConfiguration_test05()
        {
            MLFactory f = new MLFactory();
            //Deep Neural Network in(4) - 5-10-15-out(3)
            List <NNLayer> layers = new List <NNLayer>()
            {
                new NNLayer()
                {
                    Type = LayerType.Dense, Param1 = 5, FParam = Activation.TanH, Id = 1, Name = "Dense Layer"
                },
                new NNLayer()
                {
                    Type = LayerType.Dense, Param1 = 10, FParam = Activation.TanH, Id = 1, Name = "Dense Layer"
                },
                new NNLayer()
                {
                    Type = LayerType.Dense, Param1 = 15, FParam = Activation.TanH, Id = 1, Name = "Dense Layer"
                },
                new NNLayer()
                {
                    Type = LayerType.Dense, Param1 = 3, FParam = Activation.None, Id = 2, Name = "out1"
                },
            };

            //create input and output variable
            var      device     = DeviceDescriptor.UseDefaultDevice();
            Variable featureVar = Variable.InputVariable(new int[] { 4 }, DataType.Float, "in1");
            Variable labelVar   = Variable.InputVariable(new int[] { 3 }, DataType.Float, "out1");


            var nnModel = MLFactory.CreateNetwrok(layers, new List <Variable>()
            {
                { featureVar }
            }, labelVar, device);


            //Structure of the network  parameters
            var nnparams = nnModel.Inputs.Where(p => p.Uid.StartsWith("Parameter")).ToList();
            //weights
            var w = nnparams.Where(p => p.Name.Equals("w")).ToList();

            Assert.Equal(4, w.Count);//4: one for each hidden layer and one for the output layer
            // total weights 4x5 + 5x10 + 10x15 + 15x3 = 265
            Assert.Equal(265, w.Sum(p => p.Shape.TotalSize));
            //total biases
            var b = nnparams.Where(p => p.Name.Equals("b")).ToList();

            Assert.Equal(4, b.Count);//4 (3 for hidden and 1 for output layer)
            // 5 + 10 + 15 + 3 = 33
            Assert.Equal(33, b.Sum(p => p.Shape.TotalSize));


            var constants = nnModel.Inputs.Where(p => p.Uid.StartsWith("Constant")).ToList();

            Assert.Empty(constants);
            var variables = nnModel.Inputs.Where(p => p.Name.StartsWith("in1")).ToList();
            var outVars   = nnModel.Outputs.ToList();

            //check first and last variable
            Assert.Equal("in1", nnModel.Arguments[0].Name);
            Assert.Equal("out1", nnModel.Outputs[0].Name);
            Assert.Equal(3, nnModel.Output.Shape.Dimensions[0]);
        }
Example #9
        public static EvaluationResult EvaluateModel(string mlconfigPath, DataSetType dsType, EvaluationType evType, ProcessDevice pdevice)
        {
            var er = new EvaluationResult();

            er.Header = new List <string>();
            //device definition
            DeviceDescriptor device = MLFactory.GetDevice(pdevice);
            //Load ML model configuration file
            var dicMParameters = MLFactory.LoadMLConfiguration(mlconfigPath);

            //add the full path of the model folder, since the mlconfig file doesn't contain any absolute paths
            dicMParameters.Add("root", Project.GetMLConfigFolder(mlconfigPath));

            // get model data paths
            var dicPath     = MLFactory.GetMLConfigComponentPaths(dicMParameters["paths"]);
            var modelName   = Project.GetParameterValue(dicMParameters["training"], "TrainedModel");
            var nnModelPath = Path.Combine(dicMParameters["root"], modelName);

            //check if model exists
            if (!MLFactory.IsFileExist(nnModelPath))
            {
                return(er);
            }


            //check if dataset files exist
            var dataPath = GetDataPath(dicMParameters, dsType);

            if (!MLFactory.IsFileExist(dataPath))
            {
                //in case the validation dataset is not defined, fall back to the training dataset
                if (dsType == DataSetType.Validation)
                {
                    dataPath = GetDataPath(dicMParameters, DataSetType.Training);
                }
                if (!MLFactory.IsFileExist(dataPath))
                {
                    return(er);
                }
            }

            //get output classes in case the ml problem is classification
            var strCls = dicMParameters.ContainsKey("metadata") ? dicMParameters["metadata"] : "";

            er.OutputClasses = DataDescriptor.GetOutputClasses(strCls);

            //Minibatch type
            var           mbTypestr = Project.GetParameterValue(dicMParameters["training"], "Type");
            MinibatchType mbType    = (MinibatchType)Enum.Parse(typeof(MinibatchType), mbTypestr, true);
            var           mbSizetr  = Project.GetParameterValue(dicMParameters["training"], "BatchSize");

            var mf = MLFactory.CreateMLFactory(dicMParameters);
            //perform evaluation
            var evParams = new EvaluationParameters()
            {
                MinibatchSize = uint.Parse(mbSizetr),
                MBSource      = new MinibatchSourceEx(mbType, mf.StreamConfigurations.ToArray(), dataPath, null, MinibatchSource.FullDataSweep, false),
                Input         = mf.InputVariables,
                Ouptut        = mf.OutputVariables,
            };

            //evaluate model
            if (evType == EvaluationType.FeaturesOnly)
            {
                if (!dicMParameters.ContainsKey("metadata"))
                {
                    throw new Exception("The result cannot be exported to Excel, since no metadata is stored in mlconfig file.");
                }
                var desc = ParseRawDataSet(dicMParameters["metadata"]);
                er.Header  = generateHeader(desc);
                er.DataSet = FeatureAndLabels(nnModelPath, dataPath, evParams, device);

                return(er);
            }
            else if (evType == EvaluationType.Results)
            {
                //define header
                er.Header.Add(evParams.Ouptut.First().Name + "_actual");
                er.Header.Add(evParams.Ouptut.First().Name + "_predicted");

                var result = EvaluateFunction(nnModelPath, dataPath, evParams, device);
                er.Actual    = result.actual.ToList();
                er.Predicted = result.predicted.ToList();
                return(er);
            }
            else if (evType == EvaluationType.ResultyExtended)
            {
                //define header
                er.Header.Add(evParams.Ouptut.First().Name + "_actual");
                er.Header.Add(evParams.Ouptut.First().Name + "_predicted");
                er.Actual      = new List <float>();
                er.Predicted   = new List <float>();
                er.ActualEx    = new List <List <float> >();
                er.PredictedEx = new List <List <float> >();
                //
                var resultEx = EvaluateFunctionEx(nnModelPath, dataPath, evParams, device);
                for (int i = 0; i < resultEx.actual.Count(); i++)
                {
                    var res1 = MLValue.GetResult(resultEx.actual[i]);
                    er.Actual.Add(res1);
                    var res2 = MLValue.GetResult(resultEx.predicted[i]);
                    er.Predicted.Add(res2);
                }
                er.ActualEx    = resultEx.actual;
                er.PredictedEx = resultEx.predicted;

                return(er);
            }
            else
            {
                throw new Exception("Unknown evaluation type!");
            }
        }
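A minimal call sketch (the mlconfig path is hypothetical and ProcessDevice.Default is an assumed enum member; the DataSetType and EvaluationType values appear in the method body above):

        var er = EvaluateModel("..\\models\\iris.mlconfig", DataSetType.Validation, EvaluationType.Results, ProcessDevice.Default);
        //er.Header holds the column names, er.Actual and er.Predicted the evaluated values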
Example #10
 public static List <VariableDescriptor> ParseRawDataSet(string metaDataValues)
 {
     return(MLFactory.ParseRawDataSet(metaDataValues));
 }
Example #11
 private void CreateDC()
 {
     m_missileLauncher = MLFactory.CreateMissileLauncher(MLType.DreamCheeky);
     GetCount();
     GetPosition();
 }
Example #12
 private void CreateMock()
 {
     m_missileLauncher = MLFactory.CreateMissileLauncher(MLType.Mock);
     //GetCount();
     //GetPosition();
 }
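A short usage sketch of a launcher obtained from the factory (all calls below also appear in the console example further down this page; the 22.2 argument scaling mirrors that example and is otherwise an assumption):

 var launcher = MLFactory.CreateMissileLauncher(MLType.Mock);
 launcher.MoveTo(10 * 22.2, 5 * 22.2); //phi and theta scaled as in the console example
 launcher.Fire();
 launcher.Reload();
 launcher.PrintStatus();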
Example #13
        /// <summary>
        /// Evaluates the model against the dataset stored in the dataset file, and exports the result in CSV format for further analysis
        /// </summary>
        /// <param name="mlconfigPath"> path of the mlconfig file describing the model</param>
        /// <param name="bestTrainedModelPath"> path of the best trained model, relative to the mlconfig folder</param>
        /// <param name="device"> device for computation</param>
        public static void EvaluateModel(string mlconfigPath, string bestTrainedModelPath, DeviceDescriptor device)
        {
            //Load ML model configuration file
            var dicMParameters = MLFactory.LoadMLConfiguration(mlconfigPath);

            //add the full path of the model folder, since the mlconfig file doesn't contain any absolute paths
            dicMParameters.Add("root", MLFactory.GetMLConfigFolder(mlconfigPath));

            //get model data paths
            var dicPath = MLFactory.GetMLConfigComponentPaths(dicMParameters["paths"]);

            //parse feature variables
            var projectValues            = dicMParameters["training"].Split(MLFactory.m_cntkSpearator, StringSplitOptions.RemoveEmptyEntries);
            var trainedModelRelativePath = MLFactory.GetParameterValue(projectValues, "TrainedModel");


            //Minibatch type
            var           mbTypestr = MLFactory.GetParameterValue(projectValues, "Type");
            MinibatchType mbType    = (MinibatchType)Enum.Parse(typeof(MinibatchType), mbTypestr, true);
            //prepare MLFactory
            var f = MLFactory.CreateMLFactory(dicMParameters);

            //prepare data paths for mini-batch source
            var strTrainPath = $"{dicMParameters["root"]}\\{dicPath["Training"]}";
            var strValidPath = $"{dicMParameters["root"]}\\{dicPath["Validation"]}";
            var strResult    = $"{dicMParameters["root"]}\\{dicPath["Result"]}";

            var bestModelFullPath = $"{dicMParameters["root"]}\\{bestTrainedModelPath}";
            //decide what data to evaluate
            var dataPath = strValidPath;

            //load model
            var model = Function.Load(bestModelFullPath, device);

            //get data for evaluation by calling GetFullBatch
            var minibatchData = MinibatchSourceEx.GetFullBatch(mbType, dataPath, f.StreamConfigurations.ToArray(), device);
            //input map creation for model evaluation
            var inputMap = new Dictionary <Variable, Value>();

            foreach (var v in minibatchData)
            {
                var vv         = model.Arguments.Where(x => x.Name == v.Key.m_name).FirstOrDefault();
                var streamInfo = v.Key;
                if (vv != null)
                {
                    inputMap.Add(vv, minibatchData[streamInfo].data);
                }
            }

            //output map
            var predictedDataMap = new Dictionary <Variable, Value>();

            foreach (var outp in model.Outputs)
            {
                predictedDataMap.Add(outp, null);
            }

            //model evaluation
            model.Evaluate(inputMap, predictedDataMap, device);

            //retrieve actual and predicted values from model
            List <List <float> > actual  = new List <List <float> >();
            List <List <float> > predict = new List <List <float> >();

            foreach (var output in model.Outputs)
            {
                //label stream
                var labelStream = minibatchData.Keys.Where(x => x.m_name == output.Name).First();

                //actual values
                List <List <float> > av = MLValue.GetValues(output, minibatchData[labelStream].data);
                //predicted values
                List <List <float> > pv = MLValue.GetValues(output, predictedDataMap[output]);

                for (int i = 0; i < av.Count; i++)
                {
                    //actual
                    var act = av[i];
                    if (actual.Count <= i)
                    {
                        actual.Add(new List <float>());
                    }
                    actual[i].AddRange(act);
                    //prediction
                    var prd = pv[i];
                    if (predict.Count <= i)
                    {
                        predict.Add(new List <float>());
                    }
                    predict[i].AddRange(prd);
                }
            }


            //export result
            MLValue.ValueToFile(actual, predict, strResult);

            //
            Console.WriteLine(Environment.NewLine);
            Console.WriteLine($"*******************Model Evaluation**************");
            Console.WriteLine(Environment.NewLine);
            Console.WriteLine($"Model Evaluation successfully exported result into file {strResult}!");
            Console.WriteLine(Environment.NewLine);
        }
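A minimal call sketch for this overload (both paths are hypothetical; the best-model path is expected to be relative to the mlconfig folder, as the bestModelFullPath composition above shows):

        var device = DeviceDescriptor.UseDefaultDevice();
        EvaluateModel("..\\models\\iris.mlconfig", "TempModels\\best_model", device);
        //the actual and predicted values are written to the CSV file defined by the "Result" path of the mlconfig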
Example #14
        static void Main(string[] args) //commands are read interactively from the console
        {
            string filePath   = "";
            string command    = "";
            double phi        = 0.0;
            double theta      = 0.0;
            string targetName = "";

            char[]     delimiterChar = { ' ' };
            var        DCLauncher    = MLFactory.CreateMissileLauncher(MLType.DreamCheeky);
            FileReader iniReader     = null;

            Console.WriteLine("The system has loaded.");
            Console.WriteLine("Argh! Ready ta fire Captain! Argh! Argghh! Arggggghhhhh!");

            while (command.ToUpper() != "EXIT")
            {
                Console.Write(">");
                command = Console.ReadLine();
                command = command.ToUpper();
                if (command.StartsWith("FIRE"))
                {
                    DCLauncher.Fire();
                }
                else if (command.StartsWith("MOVE "))
                {
                    var values = command.Split(delimiterChar);
                    try
                    {
                        if (values.Length < 3)
                        {
                            throw new Exception("Errorargggghh!! Ye di'not enter a valid phi or theta");
                        }
                    }
                    catch (Exception ex)
                    {
                        Console.WriteLine(ex.Message);
                        continue; //skip the command when phi or theta is missing
                    }
                    phi   = Convert.ToDouble(values[1]);
                    theta = Convert.ToDouble(values[2]);
                    DCLauncher.MoveTo((phi * 22.2), (theta * 22.2));
                }
                else if (command.StartsWith("MOVEBY"))
                {
                    var values = command.Split(delimiterChar);
                    try
                    {
                        if (values.Length < 3)
                        {
                            throw new Exception("Errorargggghh!! Ye di'not enter a valid phi or theta");
                        }
                    }
                    catch (Exception ex)
                    {
                        Console.WriteLine(ex.Message);
                        continue; //skip the command when phi or theta is missing
                    }

                    phi   = Convert.ToDouble(values[1]);
                    theta = Convert.ToDouble(values[2]);
                    DCLauncher.MoveBy((phi * 22.2), (theta * 22.2));
                }
                else if (command.StartsWith("RELOAD"))
                {
                    DCLauncher.Reload();
                }
                else if (command.StartsWith("LOAD"))
                {
                    var values = command.Split(delimiterChar);
                    try
                    {
                        if (values.Length < 2)
                        {
                            throw new Exception("Errorargh! Ye din't specify a file!");
                        }
                    }
                    catch (Exception ex)
                    {
                        Console.WriteLine(ex.Message);
                        continue; //skip the command when no file was specified
                    }
                    filePath = values[1];
                    var fileExists = System.IO.File.Exists(filePath);
                    if (!fileExists)
                    {
                        Console.WriteLine("Yarrgh! No file exists! Arrrrgh!!");
                    }
                    else
                    {
                        Console.WriteLine("Argghh! Argghh! Thar be new targets in sight, cap'tin.... Argh!");
                        iniReader = FRFactory.CreateReader(FRType.INIReader, filePath);
                    }
                }
                else if (command.StartsWith("SCOUNDRELS"))
                {
                    if (iniReader == null)
                    {
                        Console.WriteLine("Arrrggh! Ye din't load a file yet, you scallywag!");
                    }
                    else
                    {
                        iniReader.Scoundrels();
                    }
                }
                else if (command.StartsWith("FRIEND"))
                {
                    if (iniReader == null)
                    {
                        Console.WriteLine("Arrrggh! Ye din't load a file yet, you scallywag!");
                    }
                    else
                    {
                        iniReader.Friends();
                    }
                }
                else if (command.StartsWith("KILL"))
                {
                    if (iniReader == null)
                    {
                        Console.WriteLine("Arrrggh! Ye din't load a file yet, you scallywag!");
                    }
                    else
                    {
                        var values = command.Split(delimiterChar);
                        try
                        {
                            if (values.Length < 2)
                            {
                                throw new Exception("Errorargh! Ye din't specify a file!");
                            }
                        }
                        catch (Exception ex)
                        {
                            Console.WriteLine(ex.Message);
                            continue; //skip the command when no target was specified
                        }
                        targetName = values[1];
                        bool friend = iniReader.isFriend(targetName);
                        if (friend)
                        {
                            Console.WriteLine("Sorry Captain, we don’t permit friendly fire, yar");
                        }
                        else
                        {
                            TargetManager targetManager = TargetManager.GetInstance();
                            double[]      phitheta      = targetManager.getCoordinates(targetName);
                            phi   = phitheta[0];
                            theta = phitheta[1];
                            DCLauncher.MoveTo((phi * 22.2), (theta * 22.2));
                            //Console.ReadLine();
                            DCLauncher.Fire();

                            targetManager.changeStatus(targetName);
                        }
                    }
                }
                else if (command.StartsWith("STATUS"))
                {
                    DCLauncher.PrintStatus();
                }
                else
                {
                    Console.WriteLine("Errorarggh!, di'not enter a valid command! Yarrrgggghhhh!!!");
                }
            }



            ////create missile launcher using factory:
            //var dClaLauncher = MLFactory.CreateMissileLauncher(MLType.DreamCheeky);
            //var mLauncher = MLFactory.CreateMissileLauncher(MLType.Mock);
            ////create file reader using factory:
            //var iniReader = FRFactory.CreateReader(FRType.INIReader, filePath);
            //var jsonReader = FRFactory.CreateReader(FRType.JSONReader, filePath);
            //var xmlReader = FRFactory.CreateReader(FRType.XMLReader, filePath);
            ////using adapter to control missile launcher:
            //var controller = new Controller();
            //controller.Launcher = new MissileLauncherAdapter();
            ////fire
            //controller.Fire();
            ////moveto
            //controller.MoveTo(0.0, 0.0);
            ////moveby
            //controller.MoveBy(0.0, 0.0);
            ////Reload command:
            //dClaLauncher.Reload();
            //mLauncher.Reload();
            ////Status command:
            //dClaLauncher.PrintStatus();
            //mLauncher.PrintStatus();
        }
Example #15
 public static List <string> GetOutputClasses(string strMetadata)
 {
     return(MLFactory.GetOutputClasses(strMetadata));
 }
Example #16
 public static List <string> GetColumnClasses(string columnData)
 {
     return(MLFactory.GetColumnClasses(columnData));
 }
Example #17
 /// <summary>
 /// Parses the array of strings, finds the named parameter, and returns its value
 /// </summary>
 /// <param name="strData"></param>
 /// <param name="name"></param>
 /// <returns></returns>
 public static string GetParameterValue(string[] strData, string name)
 {
     return(MLFactory.GetParameterValue(strData, name));
 }
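A short sketch of the expected parameter format (assumed from the training strings used elsewhere on this page; whitespace handling is an assumption):

 var strData = "|Type: default |BatchSize: 130 |Epochs:5".Split(MLFactory.m_cntkSpearator, StringSplitOptions.RemoveEmptyEntries);
 var epochs = GetParameterValue(strData, "Epochs"); //expected to return "5" under this assumed format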