        static Model CreateModel(Function inputVariable, Variable targetVariable, int targetCount,
                                 DataType dataType, DeviceDescriptor device)
        {
            var random = new Random(232);
            Func<CNTKDictionary> weightInit = () => Initializers.GlorotNormal(random.Next());
            var biasInit = Initializers.Zero();

            // Create the architecture.
            var network = inputVariable
                          .Dense(32, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .Dense(32, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .Dense(targetCount, weightInit(), biasInit, device, dataType);

            // Setup loss and metric.
            var lossFunc   = Losses.MeanSquaredError(network.Output, targetVariable);
            var metricFunc = Losses.MeanAbsoluteError(network.Output, targetVariable);

            // setup trainer.
            var learner = CntkCatalyst.Learners.Adam(network.Parameters());
            var trainer = CNTKLib.CreateTrainer(network, lossFunc, metricFunc, new LearnerVector {
                learner
            });

            var model = new Model(trainer, network, dataType, device);

            Trace.WriteLine(model.Summary());
            return model;
        }
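
For context, CreateModel pairs a mean squared error loss with an Adam learner, so it is set up for regression. A minimal sketch of how it might be called; the feature count and target shape below are illustrative assumptions, not taken from the original source:

        // Hypothetical usage of CreateModel for a regression problem with
        // 10 input features and a single target value per observation.
        var dataType = DataType.Float;
        var device   = DeviceDescriptor.UseDefaultDevice();

        var input  = Layers.Input(new int[] { 10 }, dataType);          // assumed feature count.
        var target = Variable.InputVariable(new int[] { 1 }, dataType); // assumed single target.

        var model = CreateModel(input, target, 1, dataType, device);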
Example 2
        public void Dense_InputRank_And_MapRank_At_The_Same_Time_Throws()
        {
            var inputVariable = CNTKLib.InputVariable(new int[] { 2 }, m_dataType);

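            // Specifying both inputRank and mapRank at the same time is invalid,
            // so this call is expected to throw.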
            Layers.Dense(inputVariable, 2, Initializers.GlorotUniform(seed: 232), Initializers.Zero(),
                         m_device, m_dataType, inputRank: 1, mapRank: 1);
        }
Example 3
        public void Model_Use_Case()
        {
            var inputShape      = new int[] { 28, 28, 1 };
            var numberOfClasses = 10;
            var outputShape     = new int[] { numberOfClasses };

            (var observations, var targets) = CreateArtificialData(inputShape, outputShape, observationCount: 100);

            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            var random = new Random(232);
            Func<CNTKDictionary> weightInit = () => Initializers.GlorotNormal(random.Next());
            var biasInit = Initializers.Zero();

            // Create the architecture.
            var network = Layers.Input(inputShape, dataType)
                          .Dense(512, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .Dense(numberOfClasses, weightInit(), biasInit, device, dataType)
                          .Softmax();

            // setup input and target variables.
            var inputVariable  = network.Arguments[0];
            var targetVariable = Variable.InputVariable(network.Output.Shape, dataType);

            // Setup loss and metric.
            var lossFunc   = Losses.CategoricalCrossEntropy(network.Output, targetVariable);
            var metricFunc = Metrics.Accuracy(network.Output, targetVariable);

            // setup trainer.
            var learner = Learners.MomentumSGD(network.Parameters());
            var trainer = CNTKLib.CreateTrainer(network, lossFunc, metricFunc, new LearnerVector {
                learner
            });

            var model = new Model(trainer, network, dataType, device);

            // setup name to data.
            var nameToData = new Dictionary<string, MemoryMinibatchData>
            {
                { "observations", observations },
                { "targets", targets }
            };

            // setup name to variable
            var nameToVariable = new Dictionary<string, Variable>
            {
                { "observations", inputVariable },
                { "targets", targetVariable },
            };

            var trainSource = new MemoryMinibatchSource(nameToVariable, nameToData, seed: 232, randomize: true);

            model.Fit(trainSource, batchSize: 8, epochs: 2);

            (var loss, var metric) = model.Evaluate(trainSource);

            Trace.WriteLine($"Final evaluation - Loss: {loss}, Metric: {metric}");
        }
Example 4
        //[TestMethod]
        public void Fitter_Loop()
        {
            var inputShape      = new int[] { 28, 28, 1 };
            var numberOfClasses = 10;
            var outputShape     = new int[] { numberOfClasses };

            (var observations, var targets) = CreateArtificialData(inputShape, outputShape, observationCount: 10000);

            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            var random = new Random(232);
            Func<CNTKDictionary> weightInit = () => Initializers.GlorotNormal(random.Next());
            var biasInit = Initializers.Zero();

            // Create the architecture.
            var network = Layers.Input(inputShape, dataType)
                          .Dense(512, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .Dense(numberOfClasses, weightInit(), biasInit, device, dataType)
                          .Softmax();

            // setup input and target variables.
            var inputVariable  = network.Arguments[0];
            var targetVariable = Variable.InputVariable(network.Output.Shape, dataType);

            // Setup loss and metric.
            var loss   = Losses.CategoricalCrossEntropy(network.Output, targetVariable);
            var metric = Metrics.Accuracy(network.Output, targetVariable);

            // setup trainer.
            var learner = Learners.MomentumSGD(network.Parameters());
            var trainer = CNTKLib.CreateTrainer(network, loss, metric, new LearnerVector {
                learner
            });

            // data names
            var observationsName = "observations";
            var targetsName      = "targets";

            // setup name to variable map.
            var nameToVariable = new Dictionary<string, Variable>
            {
                { observationsName, inputVariable },
                { targetsName, targetVariable },
            };

            // setup name to data map.
            var nameToData = new Dictionary<string, MemoryMinibatchData>
            {
                { observationsName, observations },
                { targetsName, targets }
            };

            var minibatchSource = new MemoryMinibatchSource(nameToVariable, nameToData, seed: 232, randomize: true);

            // setup Fitter
            var fitter = new Fitter(trainer, device);

            // Variables for the training loop.
            var epochs    = 10;
            int batchSize = 32;

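            // Note: the epoch counter is advanced inside the loop body,
            // once per completed sweep over the training data.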
            for (int epoch = 0; epoch < epochs;)
            {
                var (minibatch, isSweepEnd) = minibatchSource.GetNextMinibatch(batchSize, device);
                fitter.FitNextStep(minibatch, batchSize);

                if (isSweepEnd)
                {
                    var currentLoss   = fitter.CurrentLoss;
                    var currentMetric = fitter.CurrentMetric;
                    fitter.ResetLossAndMetricAccumulators();

                    var traceOutput = $"Epoch: {epoch + 1:000} Loss = {currentLoss:F8}, Metric = {currentMetric:F8}";

                    ++epoch;

                    Trace.WriteLine(traceOutput);
                }
            }
        }
Example 5
        public void Run()
        {
            // Prepare data
            var baseDataDirectoryPath = @"E:\DataSets\Mnist";
            var trainFilePath         = Path.Combine(baseDataDirectoryPath, "Train-28x28_cntk_text.txt");

            // Define data type and device for the model.
            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            // Setup initializers
            var random = new Random(232);
            Func<CNTKDictionary> weightInit = () => Initializers.Xavier(random.Next(), scale: 0.02);
            var biasInit = Initializers.Zero();

            // Ensure reproducible results with CNTK.
            CNTKLib.SetFixedRandomSeed((uint)random.Next());
            CNTKLib.ForceDeterministicAlgorithms();

            // Setup generator
            var generatorInputShape = NDShape.CreateNDShape(new int[] { 100 });
            var generatorInput      = Variable.InputVariable(generatorInputShape, dataType);
            var generatorNetwork    = Generator(generatorInput, weightInit, biasInit, device, dataType);

            // Setup discriminator
            var discriminatorInputShape = NDShape.CreateNDShape(new int[] { 784 }); // 28 * 28 * 1.
            var discriminatorInput      = Variable.InputVariable(discriminatorInputShape, dataType);
            // Scale pixel values from [0, 255] to [-1, 1]: x' = 2x/256 - 1 (0.00390625 = 1/256).
            var discriminatorInputScaled = CNTKLib.Minus(
                CNTKLib.ElementTimes(Constant.Scalar(2 * 0.00390625f, device), discriminatorInput),
                Constant.Scalar(1.0f, device));

            var discriminatorNetwork = Discriminator(discriminatorInputScaled, weightInit, biasInit, device, dataType);

            // The discriminator must be used on both the real MNIST images and the fake images produced by the generator.
            // One way to represent this in the computational graph is to clone the output of the discriminator function
            // with substituted inputs. Cloning with ParameterCloningMethod.Share ensures that both paths through the
            // discriminator use the same set of parameters.
            var discriminatorNetworkFake = discriminatorNetwork
                                           .Clone(ParameterCloningMethod.Share, replacements:
                                                  new Dictionary<Variable, Variable>
            {
                { discriminatorInputScaled.Output, generatorNetwork.Output },
            });

            // Create minibatch source for providing the real images.
            var imageNameToVariable = new Dictionary<string, Variable> {
                { "features", discriminatorInput }
            };
            var imageMinibatchSource = CreateMinibatchSource(trainFilePath, imageNameToVariable, randomize: true);

            // Create minibatch source for providing the noise.
            var noiseNameToVariable = new Dictionary<string, Variable> {
                { "noise", generatorInput }
            };
            var noiseMinibatchSource = new UniformNoiseMinibatchSource(noiseNameToVariable, min: -1.0f, max: 1.0f, seed: random.Next());

            // Combine both sources in the composite minibatch source.
            var compositeMinibatchSource = new CompositeMinibatchSource(imageMinibatchSource, noiseMinibatchSource);

            // Setup generator loss: 1.0 - log(D_fake)
            var generatorLossFunc = CNTKLib.Minus(Constant.Scalar(1.0f, device),
                                                  CNTKLib.Log(discriminatorNetworkFake));

            // Setup discriminator loss: -(log(D_real) + log(1.0 - D_fake))
            var discriminatorLossFunc = CNTKLib.Negate(CNTKLib.Plus(CNTKLib.Log(discriminatorNetwork),
                                                                    CNTKLib.Log(CNTKLib.Minus(Constant.Scalar(1.0f, device), discriminatorNetworkFake))));
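
            // In standard GAN notation, with D_real = D(x) for real images and
            // D_fake = D(G(z)) for generated ones, the two losses above are:
            //
            //     L_G = 1 - log(D(G(z)))
            //     L_D = -( log(D(x)) + log(1 - D(G(z))) )
            //
            // The generator loss is the non-saturating -log(D(G(z))) variant shifted
            // by the constant 1, which leaves its gradients unchanged.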

            // Create fitters for the training loop.
            // Generator uses Adam and discriminator SGD.
            // Advice from: https://github.com/soumith/ganhacks
            var generatorLearner = Learners.Adam(generatorNetwork.Parameters(),
                                                 learningRate: 0.0002, momentum: 0.5, gradientClippingThresholdPerSample: 1.0);
            var generatorFitter = CreateFitter(generatorLearner, generatorNetwork, generatorLossFunc, device);

            var discriminatorLearner = Learners.SGD(discriminatorNetwork.Parameters(),
                                                    learningRate: 0.0002, gradientClippingThresholdPerSample: 1.0);
            var discriminatorFitter = CreateFitter(discriminatorLearner, discriminatorNetwork, discriminatorLossFunc, device);

            int epochs    = 30;
            int batchSize = 128;

            // Controls how many steps the discriminator takes
            // for each single step the generator takes.
            // The default from the original paper is 1.
            int discriminatorSteps = 1;

            var isSweepEnd = false;

            for (int epoch = 0; epoch < epochs;)
            {
                for (int step = 0; step < discriminatorSteps; step++)
                {
                    // Discriminator needs both real images and noise,
                    // so uses the composite minibatch source.
                    var minibatchItems = compositeMinibatchSource.GetNextMinibatch(batchSize, device);
                    isSweepEnd = minibatchItems.isSweepEnd;

                    discriminatorFitter.FitNextStep(minibatchItems.minibatch, batchSize);
                    DisposeValues(minibatchItems.minibatch);
                }

                // Generator only needs noise images,
                // so uses the noise minibatch source separately.
                var noiseMinibatchItems = noiseMinibatchSource.GetNextMinibatch(batchSize, device);

                generatorFitter.FitNextStep(noiseMinibatchItems.minibatch, batchSize);
                DisposeValues(noiseMinibatchItems.minibatch);

                if (isSweepEnd)
                {
                    var generatorCurrentLoss = generatorFitter.CurrentLoss;
                    generatorFitter.ResetLossAndMetricAccumulators();

                    var discriminatorCurrentLoss = discriminatorFitter.CurrentLoss;
                    discriminatorFitter.ResetLossAndMetricAccumulators();

                    var traceOutput = $"Epoch: {epoch + 1:000} Generator Loss = {generatorCurrentLoss:F8}, Discriminator Loss = {discriminatorCurrentLoss:F8}";
                    Trace.WriteLine(traceOutput);

                    ++epoch;
                }
            }

            // Sample a 6x6 grid of images from the generator.
            var samples = 6 * 6;
            var batch   = noiseMinibatchSource.GetNextMinibatch(samples, device);
            var noise   = batch.minibatch;

            var predictor  = new Predictor(generatorNetwork, device);
            var images     = predictor.PredictNextStep(noise);
            var imagesData = images.SelectMany(t => t).ToArray();

            // Show examples
            var app    = new Application();
            var window = new PlotWindowBitMap("Generated Images", imagesData, 28, 28, 1, true);

            window.Show();
            app.Run(window);
        }
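
For reference, CreateFitter is not shown in this snippet. Based on how Fitter and CNTKLib.CreateTrainer are used in the examples above, a plausible sketch (an assumption, not necessarily the repository's actual helper) is:

        static Fitter CreateFitter(Learner learner, Function network, Function loss, DeviceDescriptor device)
        {
            // Use the loss both as the training objective and as the reported metric.
            var trainer = CNTKLib.CreateTrainer(network, loss, loss, new LearnerVector { learner });
            return new Fitter(trainer, device);
        }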
Example 6
        public void Run()
        {
            // Prepare data
            var baseDataDirectoryPath = @"E:\DataSets\Imdb";
            var trainFilePath         = Path.Combine(baseDataDirectoryPath, "imdb_sparse_train_50w.txt");
            var testFilePath          = Path.Combine(baseDataDirectoryPath, "imdb_sparse_test_50w.txt");

            // Define the input and output shape.
            var inputShape      = new int[] { 129888 + 4 }; // Number of distinct input words + offset for the one-hot, sparse encoding.
            var numberOfClasses = 2;
            var outputShape     = new int[] { numberOfClasses };

            // Define data type and device for the model.
            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            // Setup initializers
            var random = new Random(232);
            Func<CNTKDictionary> weightInit = () => Initializers.GlorotNormal(random.Next());
            var biasInit = Initializers.Zero();

            // Ensure reproducible results with CNTK.
            CNTKLib.SetFixedRandomSeed((uint)random.Next());
            CNTKLib.ForceDeterministicAlgorithms();

            // Create the architecture.
            var network = Layers.Input(inputShape, dataType, isSparse: true)
                          .Embedding(32, weightInit(), dataType, device)
                          .LSTMStack(32, 1, weightInit(), false, device, dataType)
                          .Dense(numberOfClasses, weightInit(), biasInit, device, dataType)
                          .Softmax();

            // Since we are processing sequence data,
            // wrap the network in SequenceLast, which selects
            // the output at the last step of each sequence.
            network = CNTKLib.SequenceLast(network);

            // Get the input variable from the network, and create the target variable.
            // The target has only a batch axis (no sequence axis), since SequenceLast
            // reduces each sequence to a single prediction.
            var inputVariable  = network.Arguments[0];
            var targetVariable = Variable.InputVariable(outputShape, dataType,
                                                        dynamicAxes: new List<Axis>() { Axis.DefaultBatchAxis() },
                                                        isSparse: false);

            // Setup loss and metric.
            var lossFunc   = Losses.CategoricalCrossEntropy(network.Output, targetVariable);
            var metricFunc = Metrics.Accuracy(network.Output, targetVariable);

            // setup trainer.
            var learner = Learners.Adam(network.Parameters());
            var trainer = Trainer.CreateTrainer(network, lossFunc, metricFunc, new List<Learner> {
                learner
            });

            // Create the model.
            var model = new Model(trainer, network, dataType, device);

            // Write model summary.
            Trace.WriteLine(model.Summary());

            // Setup minibatch sources.
            // Network will be trained using the training set,
            // and tested using the test set.
            var featuresName = "x";
            var targetsName  = "y";

            // setup name to variable map.
            var nameToVariable = new Dictionary<string, Variable>
            {
                { featuresName, inputVariable },
                { targetsName, targetVariable },
            };

            // The order of the training data is randomized.
            var train = CreateMinibatchSource(trainFilePath, featuresName, targetsName,
                                              numberOfClasses, inputShape, randomize: true);
            var trainingSource = new CntkMinibatchSource(train, nameToVariable);

            // Notice randomization is switched off for test data.
            var test = CreateMinibatchSource(testFilePath, featuresName, targetsName,
                                             numberOfClasses, inputShape, randomize: false);
            var testSource = new CntkMinibatchSource(test, nameToVariable);

            // Train the model using the training set.
            var history = model.Fit(trainingSource, epochs: 100, batchSize: 512,
                                    validationMinibatchSource: testSource);

            // Trace loss and validation history
            TraceLossValidationHistory(history);

            // Evaluate the model using the test set.
            var (loss, metric) = model.Evaluate(testSource);

            // Write the test set loss and metric to debug output.
            Trace.WriteLine($"Test set - Loss: {loss}, Metric: {metric}");

            // Write first ten predictions
            var predictions = model.Predict(testSource)
                              .Take(10);

            // Use tensor data directly, since there is only 1 element per sample.
            Trace.WriteLine($"Predictions: [{string.Join(", ", predictions.Select(p => p.First()))}]");
        }
Example 7
        public void Run()
        {
            // Prepare data
            var baseDataDirectoryPath = @"E:\DataSets\CatsAndDogs";
            var mapFiles = PrepareMapFiles(baseDataDirectoryPath);

            // Define the input and output shape.
            var inputShape      = new int[] { 150, 150, 3 };
            var numberOfClasses = 2;
            var outputShape     = new int[] { numberOfClasses };

            // Define data type and device for the model.
            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            // Setup initializers
            var random = new Random(232);
            Func<CNTKDictionary> weightInit = () => Initializers.GlorotNormal(random.Next());
            var biasInit = Initializers.Zero();

            // Ensure reproducible results with CNTK.
            CNTKLib.SetFixedRandomSeed((uint)random.Next());
            CNTKLib.ForceDeterministicAlgorithms();

            // Create the architecture.
            var network = Layers.Input(inputShape, dataType)
                          .Conv2D((3, 3), 32, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Conv2D((3, 3), 64, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Conv2D((3, 3), 128, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Conv2D((3, 3), 128, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Dense(512, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .Dense(numberOfClasses, weightInit(), biasInit, device, dataType)
                          .Softmax();
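
            // For reference: with Padding.None, a 3x3 convolution shrinks each spatial
            // dimension by 2, and 2x2 max pooling with stride 2 halves it (rounding down).
            // The feature map sizes through the stack are therefore
            // 150 -> 148 -> 74 -> 72 -> 36 -> 34 -> 17 -> 15 -> 7,
            // leaving a 7x7x128 volume before the dense layers.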

            // Get input and target variables from network.
            var inputVariable  = network.Arguments[0];
            var targetVariable = Variable.InputVariable(outputShape, dataType);

            // Setup loss and metric.
            var lossFunc   = Losses.CategoricalCrossEntropy(network.Output, targetVariable);
            var metricFunc = Metrics.Accuracy(network.Output, targetVariable);

            // setup trainer.
            var learner = Learners.RMSProp(network.Parameters());
            var trainer = Trainer.CreateTrainer(network, lossFunc, metricFunc, new List<Learner> {
                learner
            });

            // Create the model.
            var model = new Model(trainer, network, dataType, device);

            // Write model summary.
            Trace.WriteLine(model.Summary());

            // Setup minibatch sources.
            var featuresName = "features";
            var targetsName  = "targets";

            // setup name to variable map.
            var nameToVariable = new Dictionary<string, Variable>
            {
                { featuresName, inputVariable },
                { targetsName, targetVariable },
            };
            var train = CreateMinibatchSource(mapFiles.trainFilePath, featuresName, targetsName,
                                              numberOfClasses, inputShape, augmentation: true);
            var trainingSource = new CntkMinibatchSource(train, nameToVariable);

            // Notice augmentation is switched off for validation data.
            var valid = CreateMinibatchSource(mapFiles.validFilePath, featuresName, targetsName,
                                              numberOfClasses, inputShape, augmentation: false);
            var validationSource = new CntkMinibatchSource(valid, nameToVariable);

            // Notice augmentation is switched off for test data.
            var test = CreateMinibatchSource(mapFiles.testFilePath, featuresName, targetsName,
                                             numberOfClasses, inputShape, augmentation: false);
            var testSource = new CntkMinibatchSource(test, nameToVariable);

            // Train the model using the training set.
            model.Fit(trainMinibatchSource: trainingSource,
                      epochs: 100, batchSize: 32,
                      validationMinibatchSource: validationSource);

            // Evaluate the model using the test set.
            var (loss, metric) = model.Evaluate(testSource);

            // Write the test set loss and metric to debug output.
            Trace.WriteLine($"Test set - Loss: {loss}, Metric: {metric}");

            // Save model.
            model.Network.Save("cats_and_dogs_small_2.cntk");
        }
Example 8
        public void Run()
        {
            // Prepare data
            var baseDataDirectoryPath = @"E:\DataSets\Mnist";
            var trainFilePath         = Path.Combine(baseDataDirectoryPath, "Train-28x28_cntk_text.txt");
            var testFilePath          = Path.Combine(baseDataDirectoryPath, "Test-28x28_cntk_text.txt");

            // Define the input and output shape.
            var inputShape      = new int[] { 28, 28, 1 };
            var numberOfClasses = 10;
            var outputShape     = new int[] { numberOfClasses };

            // Define data type and device for the model.
            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            // Setup initializers
            var random = new Random(232);
            Func<CNTKDictionary> weightInit = () => Initializers.GlorotNormal(random.Next());
            var biasInit = Initializers.Zero();

            // Ensure reproducible results with CNTK.
            CNTKLib.SetFixedRandomSeed((uint)random.Next());
            CNTKLib.ForceDeterministicAlgorithms();

            // Create the architecture.
            var input = Layers.Input(inputShape, dataType);
            // Scale pixel values from [0, 255] into [0, 1] (0.00390625 = 1/256).
            var scaledInput = CNTKLib.ElementTimes(Constant.Scalar(0.00390625f, device), input);

            var network = scaledInput
                          .Conv2D((3, 3), 32, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Conv2D((3, 3), 32, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Conv2D((3, 3), 32, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()

                          .Dense(64, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .Dense(numberOfClasses, weightInit(), biasInit, device, dataType)
                          .Softmax();

            // Get input and target variables from network.
            var inputVariable  = network.Arguments[0];
            var targetVariable = Variable.InputVariable(outputShape, dataType);

            // Setup loss and metric.
            var lossFunc   = Losses.CategoricalCrossEntropy(network.Output, targetVariable);
            var metricFunc = Metrics.Accuracy(network.Output, targetVariable);

            // setup trainer.
            var learner = Learners.RMSProp(network.Parameters());
            var trainer = Trainer.CreateTrainer(network, lossFunc, metricFunc, new List<Learner> {
                learner
            });

            // Create the model.
            var model = new Model(trainer, network, dataType, device);

            // Write model summary.
            Trace.WriteLine(model.Summary());

            // Setup minibatch sources.
            // Network will be trained using the training set,
            // and tested using the test set.

            // setup name to variable map.
            var nameToVariable = new Dictionary<string, Variable>
            {
                { "features", inputVariable },
                { "labels", targetVariable },
            };

            // The order of the training data is randomized.
            var train          = CreateMinibatchSource(trainFilePath, nameToVariable, randomize: true);
            var trainingSource = new CntkMinibatchSource(train, nameToVariable);

            // Notice randomization is switched off for test data.
            var test       = CreateMinibatchSource(testFilePath, nameToVariable, randomize: false);
            var testSource = new CntkMinibatchSource(test, nameToVariable);

            // Train the model using the training set.
            model.Fit(trainingSource, epochs: 5, batchSize: 64);

            // Evaluate the model using the test set.
            var (loss, metric) = model.Evaluate(testSource);

            // Write the test set loss and metric to debug output.
            Trace.WriteLine($"Test set - Loss: {loss}, Metric: {metric}");
        }
Example 9
        public void Run()
        {
            // Prepare data
            var baseDataDirectoryPath = @"E:\DataSets\Mnist";
            var trainFilePath         = Path.Combine(baseDataDirectoryPath, "Train-28x28_cntk_text.txt");

            // Define data type and device for the model.
            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            // Setup initializers
            var random = new Random(232);
            Func<CNTKDictionary> weightInit = () => Initializers.GlorotUniform(random.Next());
            var biasInit = Initializers.Zero();

            // Ensure reproducible results with CNTK.
            CNTKLib.SetFixedRandomSeed((uint)random.Next());
            CNTKLib.ForceDeterministicAlgorithms();

            // Setup input dimensions and variables.
            var inputShape          = NDShape.CreateNDShape(new int[] { 28, 28, 1 });
            var inputVariable       = Variable.InputVariable(inputShape, dataType);
            // Scale pixel values from [0, 255] into [0, 1] (0.00390625 = 1/256).
            var scaledInputVariable = CNTKLib.ElementTimes(Constant.Scalar(0.00390625f, device), inputVariable);

            const int latentSpaceSize = 2;

            // Setup the encoder, which encodes the input into mean and log-variance parameters.
            var (mean, logVariance) = Encoder(scaledInputVariable, latentSpaceSize, weightInit, biasInit, device, dataType);

            // Add latent space sampling. This draws a latent point from the encoded
            // distribution using a small random epsilon (the reparameterization trick).
            var epsilon            = CNTKLib.NormalRandom(new int[] { latentSpaceSize }, dataType);
            var latentSpaceSampler = CNTKLib.Plus(mean, CNTKLib.ElementTimes(CNTKLib.Exp(logVariance), epsilon));
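
            // Sampling epsilon from a unit Gaussian and computing the latent point
            // deterministically keeps the graph differentiable w.r.t. the encoder outputs:
            //
            //     z = mean + exp(logVariance) * epsilon,   epsilon ~ N(0, I)
            //
            // Note: formulations that read logVariance as log(sigma^2) scale the noise
            // by exp(logVariance / 2); this example exponentiates logVariance directly.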

            // Add the decoder, which maps points in latent space back to an image.
            var encoderDecoderNetwork = Decoder(latentSpaceSampler, weightInit, biasInit, device, dataType);

            // Create minibatch source for providing the input images.
            var nameToVariable = new Dictionary<string, Variable> {
                { "features", encoderDecoderNetwork.Arguments[0] }
            };
            var minibatchSource = CreateMinibatchSource(trainFilePath, nameToVariable, randomize: true);

            // Reconstruction loss. Forces the decoded samples to match the initial inputs.
            var reconstructionLoss = Losses.BinaryCrossEntropy(encoderDecoderNetwork.Output, scaledInputVariable);

            // Regularization loss. Helps in learning well-formed latent spaces and reducing overfitting to the training data.
            var regularizationLoss = RegularizationLoss(mean, logVariance, dataType);

            // Overall loss function: the sum of the reconstruction and regularization losses.
            var loss = CNTKLib.Plus(reconstructionLoss, regularizationLoss);
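
            // The regularization term of a variational autoencoder is usually the KL
            // divergence between the approximate posterior and a unit Gaussian prior.
            // Assuming the RegularizationLoss helper follows the standard formulation,
            // it computes something like:
            //
            //     KL = -0.5 * sum(1 + logVariance - mean^2 - exp(logVariance))
            //
            // with the exact treatment of logVariance depending on whether it is read
            // as log(sigma) or log(sigma^2).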

            // Setup trainer.
            var learner = Learners.Adam(encoderDecoderNetwork.Parameters(), learningRate: 0.001, momentum: 0.9);
            var trainer = Trainer.CreateTrainer(encoderDecoderNetwork, loss, loss, new List<Learner> {
                learner
            });
            // Create model.
            var model = new Model(trainer, encoderDecoderNetwork, dataType, device);

            Trace.WriteLine(model.Summary());

            // Train the model.
            model.Fit(minibatchSource, batchSize: 16, epochs: 10);

            // Use the decoder to sample 15x15 images across the latent space.

            // Image generation only requires use of the decoder part of the network.
            // Clone and replace the latentSpaceSampler from training with a new decoder input variable.
            var decoderInputVariable = Variable.InputVariable(latentSpaceSampler.Output.Shape, dataType);
            var replacements         = new Dictionary<Variable, Variable>()
            {
                { latentSpaceSampler, decoderInputVariable }
            };
            var decoder = encoderDecoderNetwork.Clone(ParameterCloningMethod.Freeze, replacements);

            // Sample a 15x15 grid of points from the latent space.
            var minibatch = SampleMinibatchForGrid(device, decoderInputVariable, gridSize: 15);

            // Transform from points in latent space to images using the decoder.
            var predictor  = new Predictor(decoder, device);
            var images     = predictor.PredictNextStep(minibatch);
            var imagesData = images.SelectMany(t => t).ToArray();

            // Show examples.
            var app    = new Application();
            var window = new PlotWindowBitMap("Generated Images", imagesData, 28, 28, 1, true);

            window.Show();
            app.Run(window);
        }
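
For reference, SampleMinibatchForGrid is not shown in this snippet. Since latentSpaceSize is 2, a grid sampler only needs to tile the 2D latent plane. A minimal sketch under that assumption (the name, signature, and return type below are illustrative, not the repository's actual helper):

        // Lay out gridSize x gridSize latent points evenly over [-1, 1]^2 and
        // package them as a single batch for the decoder input variable.
        static IDictionary<Variable, Value> SampleGrid(DeviceDescriptor device,
                                                       Variable latentVariable, int gridSize)
        {
            var points = new List<float>();
            for (int i = 0; i < gridSize; i++)
            {
                for (int j = 0; j < gridSize; j++)
                {
                    // Map grid indices to latent coordinates in [-1, 1].
                    points.Add(2f * i / (gridSize - 1) - 1f);
                    points.Add(2f * j / (gridSize - 1) - 1f);
                }
            }

            var value = Value.CreateBatch(latentVariable.Shape, points, device);
            return new Dictionary<Variable, Value> { { latentVariable, value } };
        }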