public static void WriteOutputs()
 {
     // Force a full garbage collection so pending finalizers run before the saved references are printed.
     GC.Collect();
     GC.WaitForPendingFinalizers();
     Console.WriteLine("\nPrint out saved object references.");
     try
     {
         Console.WriteLine("Device0: " + Device0.AsString() + ", Type: " + Device0.Type);
         Console.WriteLine("Axis0: " + Axis0.Name + ", IsStaticAxis: " + Axis0.IsStatic);
         Console.WriteLine("OutputVar: " + OutputVar.AsString() + ", Name: " + OutputVar.Name + ", Kind: " + Utils.VariableKindName(OutputVar.Kind) + ", Shape: " + OutputVar.Shape.AsString());
         Console.WriteLine("OutputVar0: " + OutputVar0.AsString() + ", Name: " + OutputVar0.Name + ", Kind: " + Utils.VariableKindName(OutputVar.Kind) + ", Shape: " + OutputVar0.Shape.AsString());
         Console.WriteLine("InputVar0: " + InputVar0.AsString() + ", Name: " + InputVar0.Name + ", Kind: " + Utils.VariableKindName(OutputVar.Kind) + ", Shape: " + InputVar0.Shape.AsString());
         Console.WriteLine("ArgumentVar0: " + ArgumentVar0.AsString() + ", Name: " + ArgumentVar0.Name + ", Kind: " + Utils.VariableKindName(OutputVar.Kind) + ", Shape: " + ArgumentVar0.Shape.AsString());
         Console.WriteLine("OutputVal: " + ", Device: " + OutputVal.Device.AsString() + ", Storage: " + OutputVal.StorageFormat + ", Shape: " + OutputVal.Shape.AsString() + "Data:");
         var outputData = OutputVal.GetDenseData <float>(OutputVar);
         CNTKLibraryManagedExamples.PrintOutput(OutputVar.Shape.TotalSize, outputData);
     }
     catch (Exception ex)
     {
         Console.WriteLine("Memory Tests Error: {0}\nCallStack: {1}\n Inner Exception: {2}", ex.Message, ex.StackTrace, ex.InnerException != null ? ex.InnerException.Message : "No Inner Exception");
         throw ex;
     }
     Console.WriteLine("\nAll saved object references are printed.");
 }
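
The helper CNTKLibraryManagedExamples.PrintOutput is referenced but not shown above. A minimal sketch of such a helper, assuming GetDenseData<float> returns one IList<float> per sequence in the batch (as the CNTK C# API does), could look like this:

public static void PrintOutput<T>(int sampleSize, IList<IList<T>> outputBuffer)
{
    Console.WriteLine("The number of sequences in the batch: " + outputBuffer.Count);
    int seqNo = 0;
    foreach (var seq in outputBuffer)
    {
        // Each flat sequence buffer holds sampleSize values per sample.
        Console.WriteLine($"Sequence {seqNo++} contains {seq.Count / sampleSize} samples:");
        for (int i = 0; i < seq.Count; i += sampleSize)
        {
            Console.Write("    sample " + (i / sampleSize) + ":");
            for (int j = 0; j < sampleSize && i + j < seq.Count; j++)
            {
                Console.Write(" " + seq[i + j]);
            }
            Console.WriteLine();
        }
    }
}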
Example #2
 public static void WriteOutputs()
 {
     // Force a full garbage collection so pending finalizers run before the saved references are printed.
     GC.Collect();
     GC.WaitForPendingFinalizers();
     Console.WriteLine("\nPrint out saved object references.");
     try
     {
         Console.WriteLine("Device0: " + Device0.AsString() + "Type: " + Device0.Type);
         Console.WriteLine("Axis0: " + Axis0.Name + "IsStaticAxis: " + Axis0.IsStatic);
         Console.WriteLine("OutputVar: " + OutputVar.AsString() + "Name: " + OutputVar.Name + "Shape: " + OutputVar.Shape.AsString());
         Console.WriteLine("OutputVar0: " + OutputVar0.AsString() + "Name: " + OutputVar0.Name + "Shape: " + OutputVar0.Shape.AsString());
         Console.WriteLine("InputVar0: " + InputVar0.AsString() + "Name: " + InputVar0.Name + "Shape: " + InputVar0.Shape.AsString());
         Console.WriteLine("ArgumentVar0: " + ArgumentVar0.AsString() + "Name: " + ArgumentVar0.Name + "Shape: " + ArgumentVar0.Shape.AsString());
     }
     catch (Exception ex)
     {
         Console.WriteLine("Memory Tests Error: {0}\nCallStack: {1}\n Inner Exception: {2}", ex.Message, ex.StackTrace, ex.InnerException != null ? ex.InnerException.Message : "No Inner Exception");
         throw ex;
     }
 }
Example #3
        static void Main(string[] args)
        {
            string           deviceType = "CPU"; // Change to GPU if possible
            DeviceDescriptor device     = null;

            if (deviceType == "GPU")
            {
                device = DeviceDescriptor.GPUDevice(0);
            }
            else
            {
                device = DeviceDescriptor.CPUDevice;
            }

            Console.WriteLine($"Device {device.AsString()} {device.Type}[{device.Id}]");
            Console.WriteLine();

            int inputSize    = 2;
            int hiddenLayers = 4;
            int numClasses   = 1;

            var inputs = CNTKLib.InputVariable(new int[] { inputSize }, DataType.Float, "features");
            var labels = CNTKLib.InputVariable(new int[] { numClasses }, DataType.Float, "labels");

            var MLPmodel   = CreateModel(inputs, hiddenLayers, numClasses, device, "MLPmodel");
            var MLPtrainer = CreateModelTrainer(MLPmodel, inputs, labels);

            TrainFromArrays(MLPtrainer, inputs, labels, device);
            //TrainFromMiniBatchFile(MLPtrainer, inputs, labels, device);

            TestPrediction(MLPmodel, device);

            Console.WriteLine();
            Console.WriteLine("End");

            Console.ReadKey();
        }
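
CreateModel, CreateModelTrainer, TrainFromArrays and TestPrediction are helpers defined elsewhere in this project. As an illustration only, a CreateModel that stacks fully connected layers could be sketched as follows; the hidden layer width of 8, the sigmoid activation, and the DenseLayer helper are assumptions, not taken from the original code:

        private static Function CreateModel(Variable input, int hiddenLayers, int numClasses, DeviceDescriptor device, string modelName)
        {
            int hiddenDim = 8; // assumed width of each hidden layer
            Variable current = input;

            // Stack the requested number of fully connected hidden layers with sigmoid activations.
            for (int i = 0; i < hiddenLayers; i++)
            {
                current = CNTKLib.Sigmoid(DenseLayer(current, hiddenDim, device));
            }

            // Final linear layer with numClasses outputs; modelName is not used in this sketch.
            return DenseLayer(current, numClasses, device);
        }

        private static Function DenseLayer(Variable input, int outputDim, DeviceDescriptor device)
        {
            int inputDim = input.Shape[0];
            var weights = new Parameter(new int[] { outputDim, inputDim }, DataType.Float, CNTKLib.GlorotUniformInitializer(), device, "weights");
            var bias = new Parameter(new int[] { outputDim }, 0.0f, device, "bias");
            return CNTKLib.Plus(CNTKLib.Times(weights, input), bias);
        }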
Example #4
        private static void Main(string[] args)
        {
            DeviceDescriptor device = DeviceDescriptor.CPUDevice;

            Console.WriteLine("Welcome to CNTK machine learning world");
            Console.WriteLine($"Device: {device.AsString()}");

            // Ask whether to convert the raw data and create the CNTK text-format minibatch file.
            Console.WriteLine("Do you want to convert and create the text minibatch file for CNTK? If the file already exists, type 'N' to skip; otherwise type 'Y' to create it.");
            if (Console.ReadKey().Key == ConsoleKey.Y)
            {
                MNISTDataConvertor convertor = new MNISTDataConvertor();
                convertor.ConvertAndSave();
            }
            Console.WriteLine();

            string featureStreamName = "features";
            string labelsStreamName = "labels";
            string classifierName = "classifierOutput";
            int[] image_dimension = new int[] { 28, 28, 1 };
            int[] result_dimension = new int[] { 10 };
            int image_flat_size = 28 * 28;
            int number_class = 10;

            // Input and output
            Variable features = InputVariable(shape: image_dimension, dataType: DataType.Float, name: "features");
            Variable labels = InputVariable(shape: result_dimension, dataType: DataType.Float, name: "labels");

            // Scaled and CNN
            Function scaled_input = ElementTimes(Constant.Scalar(1.0f / 256.0f, device), features);
            MNISTConvolutionNN convolutionNN = new MNISTConvolutionNN(scaled_input, device, classifierName: classifierName);

            Function classifier_output = convolutionNN.CNN_Function;

            // Loss and evaluation functions
            Function loss_function = CrossEntropyWithSoftmax(new Variable(classifier_output), labels, "lossFunction");
            Function prediction = ClassificationError(new Variable(classifier_output), labels, "classificationError");

            // Learning rate
            TrainingParameterScheduleDouble trainingParameter = new TrainingParameterScheduleDouble(0.01125, 1);

            // Learners
            IList<Learner> learners = new List<Learner>()
            {
                Learner.SGDLearner(classifier_output.Parameters(), trainingParameter),
            };

            // Trainer
            Trainer trainer = Trainer.CreateTrainer(classifier_output, loss_function, prediction, learners);

            // Minibatch(Training data)
            const uint minibatchSize = 35;
            int outputFrequencyMinibatch = 35;
            int epochs = 15;
            IList<StreamConfiguration> streamConfigurations = new StreamConfiguration[]
            {
                new StreamConfiguration(featureStreamName, image_flat_size),
                new StreamConfiguration(labelsStreamName, number_class),
            };
            MinibatchSource minibatchSource = MinibatchSource.TextFormatMinibatchSource("mnist-train.txt", streamConfigurations, MinibatchSource.InfinitelyRepeat);
            StreamInformation featureStreamInfo = minibatchSource.StreamInfo(featureStreamName);
            StreamInformation labelStreamInfo = minibatchSource.StreamInfo(labelsStreamName);

            // Training
            int iteration = 0;
            while (epochs > 0)
            {
                UnorderedMapStreamInformationMinibatchData minibatchData = minibatchSource.GetNextMinibatch(minibatchSize, device);
                Dictionary<Variable, MinibatchData> arguments = new Dictionary<Variable, MinibatchData>
                {
                    { features, minibatchData[featureStreamInfo] },
                    { labels, minibatchData[labelStreamInfo] }
                };

                trainer.TrainMinibatch(arguments, device);
                PrintTrainingProgress(trainer, iteration++, outputFrequencyMinibatch);

                if (minibatchData.Values.Any(a => a.sweepEnd))
                {
                    epochs--;
                }
            }

            Console.WriteLine("*****Training done*****");

            // Save the trained model
            classifier_output.Save("cnn-model.mld");

            // Validate the model
            MinibatchSource minibatchSourceNewModel = MinibatchSource.TextFormatMinibatchSource("mnist-train.txt", streamConfigurations, MinibatchSource.FullDataSweep);
            MNISTConvolutionNN.ValidateModelWithMinibatchSource("cnn-model.mld", minibatchSourceNewModel,
                                image_dimension, number_class, featureStreamName, labelsStreamName, classifierName, device, maxCount: 10000);

            Console.ReadKey(true);
        }
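
PrintTrainingProgress and MNISTConvolutionNN are project-specific helpers that are not shown in this listing. A progress printer in the spirit of the official CNTK C# examples might look like this sketch (the message format is an assumption):

        private static void PrintTrainingProgress(Trainer trainer, int minibatchIdx, int outputFrequency)
        {
            if ((minibatchIdx % outputFrequency) == 0 && trainer.PreviousMinibatchSampleCount() != 0)
            {
                // Average loss and evaluation error over the previously processed minibatch.
                double loss = trainer.PreviousMinibatchLossAverage();
                double error = trainer.PreviousMinibatchEvaluationAverage();
                Console.WriteLine($"Minibatch: {minibatchIdx}, CrossEntropyLoss = {loss:F4}, EvaluationError = {error:F4}");
            }
        }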