Example #1
0
        /// <summary>
        /// Verifies group normalization of the 'Sales' input stream: builds the
        /// normalization functions via <see cref="MLFactory.NormalizeInputLayer"/>,
        /// evaluates them against a minibatch of the training set, and compares the
        /// normalized values with precomputed results stored in the result file.
        /// </summary>
        public void NormalizationfeatureGroup_test03()
        {
            DeviceDescriptor device = DeviceDescriptor.UseDefaultDevice();
            //create factory object
            MLFactory f = new MLFactory();

            //create config streams: three input streams (Itemid, Sales, Color) and one label;
            //normalization is requested for the 'Sales' stream only (|Normalization:Sales)
            f.CreateIOVariables("|Itemid 1 0 |Sales 4 0 |Color 1 0", "|Label 1 0", DataType.Float);
            var trData = MLFactory.CreateTrainingParameters("|Type: default |BatchSize: 130 |Epochs:5 |Normalization:Sales |SaveWhileTraining: 0 |RandomizeBatch: 0 |ProgressFrequency: 1");

            string trainingPath           = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\cntk_dataset_for_normalization_test.txt";
            string trainingNormalizedPath = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\cntk_dataset_for_normalization_test_result.txt";

            //string trainingPath = "../../../../data/cntk_dataset_for_normalization_test.txt";
            //string trainingNormalizedPath = "../../../../data/cntk_dataset_for_normalization_test_result.txt";

            //expected normalized values, one line per input variable (tab/space separated)
            var normalizedResult = System.IO.File.ReadAllLines(trainingNormalizedPath);
            //training path is passed for both train and validation sets: this test only
            //exercises the training data
            var inputVars        = MLFactory.NormalizeInputLayer(trData, f, trainingPath, trainingPath, device);

            //normalization test for train dataset
            using (var mbs1 = new MinibatchSourceEx(trData.Type, f.StreamConfigurations.ToArray(), trainingPath, trainingPath, MinibatchSource.FullDataSweep, trData.RandomizeBatch))
            {
                var data = mbs1.GetNextMinibatch(10, device);

                //go through all functions and perform the calculation
                for (int i = 0; i < inputVars.Count; i++)
                {
                    //match the minibatch stream to the i-th input variable by name
                    var fun     = (Function)inputVars[i];
                    var strName = data.Keys.FirstOrDefault(x => x.m_name.Equals(f.InputVariables[i].Name));
                    var input   = new Dictionary <Variable, Value>()
                    {
                        { f.InputVariables[i], data[strName].data }
                    };

                    var output = new Dictionary <Variable, Value>()
                    {
                        { fun, null }
                    };
                    //evaluate the normalization function for this input
                    fun.Evaluate(input, output, device);
                    var inputValues      = data[strName].data.GetDenseData <float>(fun).Select(x => x[0]).ToList();
                    var normalizedValues = output[fun].GetDenseData <float>(fun).Select(x => x[0]).ToList();
                    //map the variable index to the matching line in the result file;
                    //the grouped 'Sales' stream occupies lines 2-4, so later variables
                    //are offset by 3 — NOTE(review): offset mirrors the fixture layout
                    int index            = 0;
                    if (i < 2)
                    {
                        index = i;
                    }
                    else
                    {
                        index = i + 3;
                    }
                    var currNorLine = normalizedResult[index].Split(new char[] { '\t', ' ' }).ToList();

                    for (int j = 0; j < normalizedValues.Count; j++)
                    {
                        var n1 = normalizedValues[j].ToString(CultureInfo.InvariantCulture);
                        var n2 = currNorLine[j];
                        //compare only the first 5 characters to tolerate float rounding;
                        //very short representations (e.g. "0") are compared exactly
                        if (n1.Length < 2)
                        {
                            Assert.Equal(n1, n2);
                        }
                        else
                        {
                            Assert.Equal(n1.Substring(0, 5), n2.Substring(0, 5));
                        }
                    }
                }
            }
        }
Example #2
0
        /// <summary>
        /// Verifies Gauss normalization of the iris dataset: builds the normalization
        /// functions from the training set, then checks the normalized output against
        /// precomputed results for both the training and the validation data.
        /// </summary>
        public void gaussNormalization_test01()
        {
            DeviceDescriptor device = DeviceDescriptor.UseDefaultDevice();
            //create factory object
            MLFactory f = new MLFactory();

            //create config streams
            f.CreateIOVariables("feature 4 0", "flower 3 0", DataType.Float);
            var trData = MLFactory.CreateTrainingParameters("|Type: default |BatchSize: 130 |Epochs:5 |Normalization: 0 |SaveWhileTraining: 0 |RandomizeBatch: 0 |ProgressFrequency: 1");

            string trainingPath   = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\iris_with_hot_vector.txt";
            string validationPath = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\iris_with_hot_vector_test.txt";
            //string trainingPath = "../../../../data/iris_with_hot_vector.txt";
            //string validationPath = "../../../../data/iris_with_hot_vector_test.txt";

            //string trainingNormalizedPathh = "../../../../data/iris_train_normalized.txt";
            string trainingNormalizedPathh = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\iris_train_normalized.txt";
            var    strNormalizedLine       = System.IO.File.ReadAllLines(trainingNormalizedPathh);

            string validationNormalizedPath = "C:\\sc\\github\\anndotnet\\test\\anndotnet.unit\\data\\iris_valid_normalized.txt";
            //string validationNormalizedPath = "../../../../data/iris_valid_normalized.txt";
            var strValidNormalizedLine = System.IO.File.ReadAllLines(validationNormalizedPath);

            //build the normalization functions from the training data
            List <Function> normalizedInputs = null;
            using (var mbs1 = new MinibatchSourceEx(trData.Type, f.StreamConfigurations.ToArray(), trainingPath, validationPath, MinibatchSource.FullDataSweep, trData.RandomizeBatch))
            {
                normalizedInputs = mbs1.NormalizeInput(f.InputVariables, device);
            }

            //evaluates every normalization function against 'featureData' and compares
            //each value with the expected tab-separated lines (first 5 chars, to
            //tolerate float rounding)
            void verifyNormalization(Value featureData, string[] expectedLines)
            {
                foreach (var fun in normalizedInputs)
                {
                    var input = new Dictionary <Variable, Value>()
                    {
                        { f.InputVariables.First(), featureData }
                    };

                    var output = new Dictionary <Variable, Value>()
                    {
                        { fun, null }
                    };
                    //
                    fun.Evaluate(input, output, device);

                    var normalizedValues = output[fun].GetDenseData <float>(fun);

                    for (int i = 0; i < normalizedValues.Count; i++)
                    {
                        var currNorLine = expectedLines[i].Split('\t').ToList();
                        for (int j = 0; j < normalizedValues[0].Count; j++)
                        {
                            var n1 = normalizedValues[i][j].ToString(CultureInfo.InvariantCulture).Substring(0, 5);
                            var n2 = currNorLine[j].Substring(0, 5);
                            Assert.Equal(n1, n2);
                        }
                    }
                }
            }

            //normalization test for train dataset
            using (var mbs1 = new MinibatchSourceEx(trData.Type, f.StreamConfigurations.ToArray(), trainingPath, validationPath, MinibatchSource.FullDataSweep, trData.RandomizeBatch))
            {
                var data = mbs1.GetNextMinibatch(130, device);
                verifyNormalization(data.First().Value.data, strNormalizedLine);
            }

            //normalization test for validation dataset (full batch)
            using (var mbs1 = new MinibatchSourceEx(trData.Type, f.StreamConfigurations.ToArray(), trainingPath, validationPath, MinibatchSource.FullDataSweep, trData.RandomizeBatch))
            {
                var data = MinibatchSourceEx.GetFullBatch(mbs1.Type, mbs1.ValidationDataFile, mbs1.StreamConfigurations, device);
                verifyNormalization(data.First().Value.data, strValidNormalizedLine);
            }
        }