Example No. 1
        private static Function CreateMLPClassifier(DeviceDescriptor device, int numOutputClasses, int hiddenLayerDim, Function input, string classifierName)
        {
            Function dense1           = CNTKHelper.Dense(input, hiddenLayerDim, device, Activation.Sigmoid, "");
            Function classifierOutput = CNTKHelper.Dense(dense1, numOutputClasses, device, Activation.None, classifierName);

            return classifierOutput;
        }
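For context, a minimal usage sketch (not part of the original snippet): the 10-class input, the 64 hidden units, and the names below are illustrative assumptions. The input Variable is wrapped in an identity ElementTimes node so it can be handed over as a Function, the same pattern the CNTK C# samples use.

            // Hypothetical usage sketch; dimensions and names are assumptions, not from the original code.
            var device = DeviceDescriptor.CPUDevice;
            Variable rawInput = CNTKLib.InputVariable(new int[] { 10 }, DataType.Float, "Input");
            // Identity scaling, used here only to obtain a Function node from the input Variable
            Function input = CNTKLib.ElementTimes(Constant.Scalar<float>(1.0f, device), rawInput);
            Function classifier = CreateMLPClassifier(device, 10, 64, input, "MLPClassifier");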
Example No. 2
        public Model()
        {
            #region Create Models
            mlpModel = CNTKHelper.CreateMLPModel(device, TEST1_SIZE, 1, TEST1_SIZE);

            cnnModel = CNTKHelper.CreateCNNModel(device, TEST1_SIZE, 1, TEST1_SIZE);
            #endregion
        }
Example No. 3
        private static float[] ExpectedOutput(int size)
        {
            // Build a flattened size x size identity matrix: row i is the one-hot encoding of digit i
            var result = new float[size * size];

            for (int i = 0; i < size; i++)
            {
                var oneHot = CNTKHelper.OneHot(i, size);
                for (int j = 0; j < size; j++)
                {
                    result[i * size + j] = oneHot[j];
                }
            }
            return result;
        }
Example No. 4
        private static float[] OneHotImages(int size)
        {
            // Build 'size' images of size x size pixels; in image k every pixel of row k is 1, all others are 0
            var result = new float[size * size * size];

            for (int k = 0; k < size; k++)
            {
                var oneHot = CNTKHelper.OneHot(k, size);
                for (int i = 0; i < size; i++)
                {
                    for (int j = 0; j < size; j++)
                    {
                        result[k * size * size + i * size + j] = oneHot[i];
                    }
                }
            }
            return result;
        }
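For intuition, here is what these two helpers produce for size = 3 (worked out from the loops above; the size is illustrative):

            // ExpectedOutput(3) -> flattened 3x3 identity matrix, one one-hot label per digit:
            //   [ 1,0,0,   0,1,0,   0,0,1 ]
            //
            // OneHotImages(3) -> three 3x3 "images", flattened; image k has its k-th row filled with 1s:
            //   image 0: 1 1 1    image 1: 0 0 0    image 2: 0 0 0
            //            0 0 0             1 1 1             0 0 0
            //            0 0 0             0 0 0             1 1 1
            float[] labels = ExpectedOutput(3);  // length 9
            float[] images = OneHotImages(3);    // length 27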
Example No. 5
        /// <summary>
        /// Create a convolutional neural network
        /// </summary>
        /// <param name="features">input feature variable</param>
        /// <param name="inputLayers">number of input channels</param>
        /// <param name="outDims">number of output classes</param>
        /// <param name="device">CPU or GPU device to run the model</param>
        /// <param name="classifierName">name of the classifier</param>
        /// <returns>the convolutional neural network classifier</returns>
        static Function CreateConvolutionalNeuralNetwork(Variable features, int inputLayers, int outDims, DeviceDescriptor device, string classifierName)
        {
            // 28x28x1 -> 14x14x4
            int kernelWidth1 = 3, kernelHeight1 = 3, outFeatureMapCount1 = 4;
            int hStride1 = 2, vStride1 = 2;
            int poolingWindowWidth1 = 3, poolingWindowHeight1 = 3;

            Function pooling1 = ConvolutionWithMaxPooling(features, device, kernelWidth1, kernelHeight1,
                                                          inputLayers, outFeatureMapCount1, hStride1, vStride1, poolingWindowWidth1, poolingWindowHeight1);

            // 14x14x4 -> 7x7x8
            int kernelWidth2 = 3, kernelHeight2 = 3, numInputChannels2 = outFeatureMapCount1, outFeatureMapCount2 = 8;
            int hStride2 = 2, vStride2 = 2;
            int poolingWindowWidth2 = 3, poolingWindowHeight2 = 3;

            Function pooling2 = ConvolutionWithMaxPooling(pooling1, device, kernelWidth2, kernelHeight2,
                                                          numInputChannels2, outFeatureMapCount2, hStride2, vStride2, poolingWindowWidth2, poolingWindowHeight2);

            Function denseLayer = CNTKHelper.Dense(pooling2, outDims, device, Activation.None, classifierName);

            return denseLayer;
        }
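A minimal sketch of how this factory might be called, assuming the 28x28 single-channel input implied by the shape comments and 10 output classes; all concrete values here are illustrative:

            // Hypothetical usage; shape { width, height, channels } follows the "28x28x1" comment above.
            var device = DeviceDescriptor.CPUDevice;
            Variable features = CNTKLib.InputVariable(new int[] { 28, 28, 1 }, DataType.Float, "Features");
            Function cnnClassifier = CreateConvolutionalNeuralNetwork(features, 1, 10, device, "CNNClassifier");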
Example No. 6
        /// <summary>
        /// Test a simple model that takes a one-hot encoded digit as input and returns the same digit as output
        /// </summary>
        private void TrainAndEvaluateTest(Function model, Value inputValue)
        {
            #region Evaluate model before training

            var inputDataMap = new Dictionary<Variable, Value>()
            {
                { model.Arguments[0], inputValue }
            };
            var outputDataMap = new Dictionary<Variable, Value>()
            {
                { model.Output, null }
            };

            model.Evaluate(inputDataMap, outputDataMap, DeviceDescriptor.CPUDevice);

            IList<IList<float>> preTrainingOutput = outputDataMap[model.Output].GetDenseData<float>(model.Output);
            for (int i = 0; i < TEST1_SIZE; i++)
            {
                Trace.WriteLine($"Argmax({i}): {CNTKHelper.ArgMax(preTrainingOutput[i].ToArray())}");
            }
            #endregion

            #region Train Model
            var labels       = CNTKLib.InputVariable(new int[] { TEST1_SIZE }, DataType.Float, "Error Input");
            var trainingLoss = CNTKLib.CrossEntropyWithSoftmax(new Variable(model), labels, "lossFunction");
            var prediction   = CNTKLib.ClassificationError(new Variable(model), labels, "classificationError");

            // Set per sample learning rate
            CNTK.TrainingParameterScheduleDouble learningRatePerSample = new CNTK.TrainingParameterScheduleDouble(0.003125, 1);

            IList<Learner> parameterLearners = new List<Learner>()
            {
                Learner.SGDLearner(model.Parameters(), learningRatePerSample)
            };
            var trainer = Trainer.CreateTrainer(model, trainingLoss, prediction, parameterLearners);

            // Create expected output
            var expectedOutputValue = Value.CreateBatch<float>(new int[] { TEST1_SIZE }, ExpectedOutput(TEST1_SIZE), DeviceDescriptor.CPUDevice);

            var inputMiniBatch  = new MinibatchData(inputValue, TEST1_SIZE);
            var outputMiniBatch = new MinibatchData(expectedOutputValue, TEST1_SIZE);

            var arguments = new Dictionary<Variable, MinibatchData>
            {
                { model.Arguments[0], inputMiniBatch },
                { labels, outputMiniBatch }
            };
            int epochs = 5;
            while (epochs > 0)
            {
                trainer.TrainMinibatch(arguments, device);

                epochs--;
            }
            #endregion

            #region Evaluate Model after training

            outputDataMap = new Dictionary<Variable, Value>()
            {
                { model.Output, null }
            };
            model.Evaluate(inputDataMap, outputDataMap, DeviceDescriptor.CPUDevice);

            IList<IList<float>> postTrainingOutput = outputDataMap[model.Output].GetDenseData<float>(model.Output);
            int nbFail = 0;
            for (int i = 0; i < TEST1_SIZE; i++)
            {
                int preTrainValue  = CNTKHelper.ArgMax(preTrainingOutput[i].ToArray());
                int postTrainValue = CNTKHelper.ArgMax(postTrainingOutput[i].ToArray());
                if (i != postTrainValue)
                {
                    nbFail++;
                }
                Trace.WriteLine($"Argmax({i}): {prepTrainValue} ==>  {postTrainValue}");
            }
            Trace.WriteLine($"Failure rate = ({nbFail}/{TEST1_SIZE})");
            #endregion
        }
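A sketch of how this test might be driven for both models from Example No. 2, assuming the one-hot helpers above supply the inputs and that batches are packed with Value.CreateBatch exactly as the expected output is inside the method; the input shapes are assumptions:

            // Hypothetical driver; input shapes and the reuse of ExpectedOutput/OneHotImages are assumptions.
            // MLP case: each sample is a one-hot vector of length TEST1_SIZE
            var mlpInput = Value.CreateBatch<float>(new int[] { TEST1_SIZE },
                                                    ExpectedOutput(TEST1_SIZE), DeviceDescriptor.CPUDevice);
            TrainAndEvaluateTest(mlpModel, mlpInput);

            // CNN case: each sample is a TEST1_SIZE x TEST1_SIZE single-channel "image"
            var cnnInput = Value.CreateBatch<float>(new int[] { TEST1_SIZE, TEST1_SIZE, 1 },
                                                    OneHotImages(TEST1_SIZE), DeviceDescriptor.CPUDevice);
            TrainAndEvaluateTest(cnnModel, cnnInput);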