static void Main(string[] args)
        {
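            // note: this assumes the usual using directives for System, System.Collections.Generic,
            // CNTK (CNTKLib, DataType), and the helper library that provides NetUtil and DataUtil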
            // unpack the archive if the binary data files are not present yet
            Console.WriteLine("Loading data...");
            if (!System.IO.File.Exists("x_train.bin"))
            {
                DataUtil.Unzip(@"..\..\..\..\..\imdb_data.zip", ".");
            }

            // load training and test data
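            // 25,000 reviews each for training and testing, encoded as 10,000-element vectors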
            var training_data   = DataUtil.LoadBinary<float>("x_train.bin", 25000, 10000);
            var training_labels = DataUtil.LoadBinary<float>("y_train.bin", 25000);
            var test_data       = DataUtil.LoadBinary<float>("x_test.bin", 25000, 10000);
            var test_labels     = DataUtil.LoadBinary<float>("y_test.bin", 25000);

            // create feature and label variables
            var features = NetUtil.Var(new[] { 10000 }, DataType.Float);
            var labels   = NetUtil.Var(new[] { 1 }, DataType.Float);

            // create neural network
            var network = features
                          .Dense(16, CNTKLib.ReLU)
                          .Dense(16, CNTKLib.ReLU)
                          .Dense(1, CNTKLib.Sigmoid)
                          .ToNetwork();

            // create loss and test functions
            var lossFunc     = CNTKLib.BinaryCrossEntropy(network.Output, labels);
            var accuracyFunc = NetUtil.BinaryAccuracy(network.Output, labels);

            // use the Adam learning algorithm
            var learner = network.GetAdamLearner(
                learningRateSchedule: (0.001, 1),
                momentumSchedule: (0.9, 1),
                unitGain: true);

            // get a trainer for training, and an evaluator for testing the network
            var trainer   = network.GetTrainer(learner, lossFunc, accuracyFunc);
            var evaluator = network.GetEvaluator(accuracyFunc);

            // declare variables to track training progress
            var trainingError   = new List<double>();
            var validationError = new List<double>();
            var maxEpochs       = 7;
            var batchSize       = 32;
            var batchCount      = 0;
            var error           = 0.0;

            // train for a number of epochs
            Console.WriteLine("Training network...");
            for (int epoch = 0; epoch < maxEpochs; epoch++)
            {
                error      = 0.0;
                batchCount = 0;

                // train the network using batches
                training_data.Index().Shuffle().Batch(
                    batchSize,
                    (indices, begin, end) =>
                {
                    // get the current batch for training
                    var featureBatch = features.GetBatch(training_data, indices, begin, end);
                    var labelBatch   = labels.GetBatch(training_labels, indices, begin, end);

                    // train the network on the batch
                    var result = trainer.TrainBatch(
                        new[]
                    {
                        (features, featureBatch),
                        (labels, labelBatch)
                    },