        public void MemoryMinibatchSource_GetNextMinibatch()
        {
            var observationsShape = new int[] { 5 };
            var observations      = new MemoryMinibatchData(m_observationsData, observationsShape, 9);
            var targetsShape      = new int[] { 1 };
            var targets           = new MemoryMinibatchData(m_targetData, targetsShape, 9);

            // setup name to data map.
            var nameToData = new Dictionary<string, MemoryMinibatchData>
            {
                { "observations", observations },
                { "targets", targets }
            };

            var nameToVariable = new Dictionary<string, Variable>
            {
                { "observations", Variable.InputVariable(observationsShape, DataType.Float) },
                { "targets", Variable.InputVariable(targetsShape, DataType.Float) }
            };

            var sut    = new MemoryMinibatchSource(nameToVariable, nameToData, 5, false);
            var device = DeviceDescriptor.CPUDevice;

            for (int i = 0; i < 30; i++)
            {
                var (minibatch, isSweepEnd) = sut.GetNextMinibatch(3, device);
                var obs = minibatch[nameToVariable["observations"]].GetDenseData<float>(nameToVariable["observations"]);
                var tar = minibatch[nameToVariable["targets"]].GetDenseData<float>(nameToVariable["targets"]);
            }
        }
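
        // The test above reads the fields m_observationsData and m_targetData, which are not
        // part of this listing. A minimal sketch is given below; the concrete values are an
        // assumption, only the sizes follow from the test (9 samples with shape { 5 } and
        // 9 scalar targets).
        readonly float[] m_observationsData = Enumerable.Range(0, 9 * 5)
            .Select(v => (float)v).ToArray();

        readonly float[] m_targetData = Enumerable.Range(0, 9)
            .Select(v => (float)v).ToArray();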
        public void Model_Use_Case()
        {
            var inputShape      = new int[] { 28, 28, 1 };
            var numberOfClasses = 10;
            var outputShape     = new int[] { numberOfClasses };

            (var observations, var targets) = CreateArtificialData(inputShape, outputShape, observationCount: 100);

            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            var random = new Random(232);
            Func<CNTKDictionary> weightInit = () => Initializers.GlorotNormal(random.Next());
            var biasInit = Initializers.Zero();

            // Create the architecture.
            var network = Layers.Input(inputShape, dataType)
                          .Dense(512, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .Dense(numberOfClasses, weightInit(), biasInit, device, dataType)
                          .Softmax();

            // setup input and target variables.
            var inputVariable  = network.Arguments[0];
            var targetVariable = Variable.InputVariable(network.Output.Shape, dataType);

            // setup loss and metric.
            var lossFunc   = Losses.CategoricalCrossEntropy(network.Output, targetVariable);
            var metricFunc = Metrics.Accuracy(network.Output, targetVariable);

            // setup trainer.
            var learner = Learners.MomentumSGD(network.Parameters());
            var trainer = CNTKLib.CreateTrainer(network, lossFunc, metricFunc, new LearnerVector { learner });

            var model = new Model(trainer, network, dataType, device);

            // setup name to data.
            var nameToData = new Dictionary<string, MemoryMinibatchData>
            {
                { "observations", observations },
                { "targets", targets }
            };

            // setup name to variable
            var nameToVariable = new Dictionary<string, Variable>
            {
                { "observations", inputVariable },
                { "targets", targetVariable },
            };

            var trainSource = new MemoryMinibatchSource(nameToVariable, nameToData, seed: 232, randomize: true);

            model.Fit(trainSource, batchSize: 8, epochs: 2);

            (var loss, var metric) = model.Evaluate(trainSource);

            Trace.WriteLine($"Final evaluation - Loss: {loss}, Metric: {metric}");
        }
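
        // Model_Use_Case above and Fitter_Loop below both call CreateArtificialData, which is
        // not shown in this listing. The sketch below is one plausible implementation, assuming
        // random feature values and one-hot targets; only the MemoryMinibatchData constructor
        // is taken from the surrounding examples, the rest is an assumption.
        static (MemoryMinibatchData observations, MemoryMinibatchData targets) CreateArtificialData(
            int[] inputShape, int[] outputShape, int observationCount)
        {
            var random = new Random(232);

            // Random feature values, stored as a flat float array.
            var featureSize = inputShape.Aggregate((v1, v2) => v1 * v2);
            var observationsData = Enumerable.Range(0, featureSize * observationCount)
                .Select(_ => (float)random.NextDouble()).ToArray();
            var observations = new MemoryMinibatchData(observationsData, inputShape, observationCount);

            // One-hot encode a random class for each observation.
            var classCount = outputShape.Aggregate((v1, v2) => v1 * v2);
            var targetsData = new float[classCount * observationCount];
            for (int i = 0; i < observationCount; i++)
            {
                targetsData[i * classCount + random.Next(classCount)] = 1f;
            }
            var targets = new MemoryMinibatchData(targetsData, outputShape, observationCount);

            return (observations, targets);
        }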
        //[TestMethod]
        public void Fitter_Loop()
        {
            var inputShape      = new int[] { 28, 28, 1 };
            var numberOfClasses = 10;
            var outputShape     = new int[] { numberOfClasses };

            (var observations, var targets) = CreateArtificialData(inputShape, outputShape, observationCount: 10000);

            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            var random = new Random(232);
            Func<CNTKDictionary> weightInit = () => Initializers.GlorotNormal(random.Next());
            var biasInit = Initializers.Zero();

            // Create the architecture.
            var network = Layers.Input(inputShape, dataType)
                          .Dense(512, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .Dense(numberOfClasses, weightInit(), biasInit, device, dataType)
                          .Softmax();

            // setup input and target variables.
            var inputVariable  = network.Arguments[0];
            var targetVariable = Variable.InputVariable(network.Output.Shape, dataType);

            // setup loss and metric.
            var loss   = Losses.CategoricalCrossEntropy(network.Output, targetVariable);
            var metric = Metrics.Accuracy(network.Output, targetVariable);

            // setup trainer.
            var learner = Learners.MomentumSGD(network.Parameters());
            var trainer = CNTKLib.CreateTrainer(network, loss, metric, new LearnerVector { learner });

            // data names
            var observationsName = "observations";
            var targetsName      = "targets";

            // setup name to variable map.
            var nameToVariable = new Dictionary<string, Variable>
            {
                { observationsName, inputVariable },
                { targetsName, targetVariable },
            };

            // setup name to data map.
            var nameToData = new Dictionary<string, MemoryMinibatchData>
            {
                { observationsName, observations },
                { targetsName, targets }
            };

            var minibatchSource = new MemoryMinibatchSource(nameToVariable, nameToData, seed: 232, randomize: true);

            // setup Fitter
            var fitter = new Fitter(trainer, device);

            // variables for training loop.
            var inputMap = new Dictionary<Variable, Value>();

            var epochs    = 10;
            int batchSize = 32;

            for (int epoch = 0; epoch < epochs;)
            {
                var (minibatch, isSweepEnd) = minibatchSource.GetNextMinibatch(batchSize, device);
                fitter.FitNextStep(minibatch, batchSize);

                if (isSweepEnd)
                {
                    var currentLoss   = fitter.CurrentLoss;
                    var currentMetric = fitter.CurrentMetric;
                    fitter.ResetLossAndMetricAccumulators();

                    var traceOutput = $"Epoch: {epoch + 1:000} Loss = {currentLoss:F8}, Metric = {currentMetric:F8}";

                    ++epoch;

                    Trace.WriteLine(traceOutput);
                }
            }
        }
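
        // The manual loop above only tracks training loss and metric. A held-out evaluation
        // could reuse the Model wrapper shown in Model_Use_Case; the helper below is a sketch
        // of that idea. The name EvaluateAfterFitting is hypothetical, and validationSource is
        // assumed to be a MemoryMinibatchSource built over validation data in the same way as
        // minibatchSource above.
        static void EvaluateAfterFitting(Trainer trainer, Function network, DataType dataType,
            DeviceDescriptor device, MemoryMinibatchSource validationSource)
        {
            // Wrap the already-fitted trainer and network, then evaluate on the validation data.
            var model = new Model(trainer, network, dataType, device);
            (var loss, var metric) = model.Evaluate(validationSource);

            Trace.WriteLine($"Validation - Loss: {loss}, Metric: {metric}");
        }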
        public void SharpLearning_With_Cntk_Example()
        {
            // Load data
            var (observations, targets) = DataSetUtilities.LoadWinequalityWhite();

            // transform data for neural net
            var transform = new MinMaxTransformer(0.0, 1.0);

            transform.Transform(observations, observations);

            var featureCount     = observations.ColumnCount;
            var observationCount = observations.RowCount;
            var targetCount      = 1;

            var inputShape  = new int[] { featureCount, 1 };
            var outputShape = new int[] { targetCount };

            // Convert data to float, and wrap as minibatch data.
            var observationsFloat = observations.Data().Select(v => (float)v).ToArray();
            var observationsData  = new MemoryMinibatchData(observationsFloat, inputShape, observationCount);
            var targetsFloat      = targets.Select(v => (float)v).ToArray();
            var targetsData       = new MemoryMinibatchData(targetsFloat, outputShape, observationCount);

            var dataType = DataType.Float;
            var device   = DeviceDescriptor.CPUDevice;

            // setup input and target variables.
            var inputVariable  = Layers.Input(inputShape, dataType);
            var targetVariable = Variable.InputVariable(outputShape, dataType);

            // setup name to variable
            var nameToVariable = new Dictionary<string, Variable>
            {
                { "observations", inputVariable },
                { "targets", targetVariable },
            };

            // Get cross validation folds.
            var sampler = new RandomIndexSampler<double>(seed: 24);
            var crossValidationIndexSets = CrossValidationUtilities
                                           .GetKFoldCrossValidationIndexSets(sampler, foldCount: 10, targets: targets);
            var predictions = new double[observationCount];

            // Run cross validation loop.
            foreach (var set in crossValidationIndexSets)
            {
                // setup data.
                var trainingNameToData = new Dictionary<string, MemoryMinibatchData>
                {
                    { "observations", observationsData.GetSamples(set.trainingIndices) },
                    { "targets", targetsData.GetSamples(set.trainingIndices) }
                };

                var validationNameToData = new Dictionary<string, MemoryMinibatchData>
                {
                    { "observations", observationsData.GetSamples(set.validationIndices) },
                    { "targets", targetsData.GetSamples(set.validationIndices) }
                };

                var trainSource      = new MemoryMinibatchSource(nameToVariable, trainingNameToData, seed: 232, randomize: true);
                var validationSource = new MemoryMinibatchSource(nameToVariable, validationNameToData, seed: 232, randomize: false);

                // Create model and fit.
                var model = CreateModel(inputVariable, targetVariable, targetCount, dataType, device);
                model.Fit(trainSource, batchSize: 128, epochs: 10);

                // Predict.
                var predictionsRaw     = model.Predict(validationSource);
                var currentPredictions = predictionsRaw.Select(v => (double)v.Single()).ToArray();

                // set cross-validation predictions
                var validationIndices = set.validationIndices;
                for (int i = 0; i < validationIndices.Length; i++)
                {
                    predictions[validationIndices[i]] = currentPredictions[i];
                }
            }

            Trace.WriteLine(FormatErrorString(targets, predictions));
        }
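
        // SharpLearning_With_Cntk_Example calls CreateModel, which is not part of this listing.
        // The sketch below assumes a small fully connected regression network; the hidden layer
        // size, the MomentumSGD learner, and the use of CNTKLib.SquaredError for both loss and
        // metric are assumptions, only the layer, trainer and Model calls mirror the examples above.
        static Model CreateModel(Function input, Variable targetVariable, int targetCount,
            DataType dataType, DeviceDescriptor device)
        {
            var random = new Random(232);
            Func<CNTKDictionary> weightInit = () => Initializers.GlorotNormal(random.Next());
            var biasInit = Initializers.Zero();

            // Small fully connected network with a linear output layer for regression.
            var network = input
                .Dense(32, weightInit(), biasInit, device, dataType)
                .ReLU()
                .Dense(targetCount, weightInit(), biasInit, device, dataType);

            // Squared error is used both as training loss and as reported metric here.
            var loss   = CNTKLib.SquaredError(network.Output, targetVariable);
            var metric = CNTKLib.SquaredError(network.Output, targetVariable);

            var learner = Learners.MomentumSGD(network.Parameters());
            var trainer = CNTKLib.CreateTrainer(network, loss, metric, new LearnerVector { learner });

            return new Model(trainer, network, dataType, device);
        }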