void print_debugging_info() {
  if (computeDevice == null) { computeDevice = Util.get_compute_device(); }
  var features = CNTK.Variable.InputVariable(new int[] { 150, 150, 3 }, CNTK.DataType.Float, "features");
  var adjusted_features = CNTK.CNTKLib.Plus(CNTK.Constant.Scalar<float>((float)(-110), computeDevice), features);
  var scalar_factor = CNTK.Constant.Scalar<float>((float)(1.0 / 255.0), computeDevice);
  var scaled_features = CNTK.CNTKLib.ElementTimes(scalar_factor, adjusted_features);

  var convolution_map_size = new int[] { 1, 1, CNTK.NDShape.InferredDimension, 3 };
  var W = new CNTK.Parameter(
    CNTK.NDShape.CreateNDShape(convolution_map_size),
    CNTK.DataType.Float,
    CNTK.CNTKLib.GlorotUniformInitializer(
      CNTK.CNTKLib.DefaultParamInitScale,
      CNTK.CNTKLib.SentinelValueForInferParamInitRank,
      CNTK.CNTKLib.SentinelValueForInferParamInitRank, 1),
    computeDevice);
  var result = CNTK.CNTKLib.Convolution(
    W, scaled_features,
    strides: CNTK.NDShape.CreateNDShape(new int[] { 1 }),
    sharing: new CNTK.BoolVector(new bool[] { false }),
    autoPadding: new CNTK.BoolVector(new bool[] { true }));

  var model = VGG16.get_model(result, computeDevice);
  Util.PredorderTraverse(model);
  var shape = model.Output.Shape;
  Console.WriteLine(shape.AsString());
}
void run() {
  Console.Title = "Ch_05_Visualizing_Intermediate_Activations";
  computeDevice = Util.get_compute_device();
  var features = CNTK.Variable.InputVariable(new int[] { 150, 150, 3 }, CNTK.DataType.Float, "features");
  var adjusted_features = CNTK.CNTKLib.Plus(CNTK.Constant.Scalar<float>((float)(-110), computeDevice), features, "adjusted features");
  var scalar_factor = CNTK.Constant.Scalar<float>((float)(1.0 / 255.0), computeDevice);
  var scaled_features = CNTK.CNTKLib.ElementTimes(scalar_factor, adjusted_features, "scaled features");
  var base_model = VGG16.get_model(scaled_features, computeDevice);
  Util.summary(base_model);

  var app = new System.Windows.Application();
  var layer_names = new string[] { "pool1", "pool2", "pool3" };
  var num_entries = new int[] { 64, 64, 256 };
  for (int i = 0; i < layer_names.Length; i++) {
    var intermediate_node = base_model.FindByName(layer_names[i]);
    var model = CNTK.CNTKLib.Combine(new CNTK.VariableVector() { intermediate_node.Output });

    var image = load_image_in_channels_first_format(cat_filename, 150, 150);
    var image_tensor = CNTK.Value.CreateBatch(features.Shape, image, computeDevice);

    var input_d = new Dictionary<CNTK.Variable, CNTK.Value>() { { features, image_tensor } };
    var output_d = new Dictionary<CNTK.Variable, CNTK.Value>() { { model.Output, null } };
    model.Evaluate(input_d, output_d, computeDevice);

    var outputValues = output_d[intermediate_node.Output].GetDenseData<float>(intermediate_node.Output);
    var feature_height = intermediate_node.Output.Shape[0];
    var feature_width = intermediate_node.Output.Shape[1];
    var activations = outputValues[0].Take(num_entries[i] * feature_width * feature_height).ToArray();

    var window = new PlotWindowBitMap(layer_names[i], activations, feature_height, feature_width, 1);
    window.Show();
  }
  app.Run();
}
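// Not part of the original listing: a small helper sketch for rescaling one channel of the
// raw activations to the 0..255 range before rendering, a common normalization step when
// visualizing intermediate activations. It assumes the activation array is laid out channel
// by channel, as it is consumed in run() above; the method name is hypothetical.
float[] normalize_channel_for_display(float[] activations, int channel, int feature_width, int feature_height) {
  var channel_size = feature_width * feature_height;
  // slice out the requested channel
  var channel_values = activations.Skip(channel * channel_size).Take(channel_size).ToArray();
  var min = channel_values.Min();
  var max = channel_values.Max();
  // guard against a constant channel to avoid division by zero
  var range = Math.Max(max - min, 1e-6f);
  return channel_values.Select(v => 255.0f * (v - min) / range).ToArray();
}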
Tuple<float[][], float[][]> extract_features(int start_index, int sample_count, string prefix) {
  var extracted_features = new float[2 * sample_count][];
  var extracted_labels = new float[extracted_features.Length][];

  var labels = CNTK.Variable.InputVariable(new int[] { 2 }, CNTK.DataType.Float, "labels");
  var features = CNTK.Variable.InputVariable(new int[] { 150, 150, 3 }, CNTK.DataType.Float, "features");
  var scalar_factor = CNTK.Constant.Scalar<float>((float)(1.0 / 255.0), computeDevice);
  var scaled_features = CNTK.CNTKLib.ElementTimes(scalar_factor, features);
  var conv_base = VGG16.get_model(scaled_features, computeDevice);
  //Util.PredorderTraverse(conv_base);

  var minibatch_source = create_minibatch_source(features.Shape, start_index, sample_count, prefix);
  var features_stream_info = minibatch_source.StreamInfo("features");
  var labels_stream_info = minibatch_source.StreamInfo("labels");

  var pos = 0;
  while (pos < extracted_features.Length) {
    var pos_end = Math.Min(pos + batch_size, extracted_features.Length);
    var data = minibatch_source.GetNextMinibatch((uint)(pos_end - pos), computeDevice);

    var input_d = new Dictionary<CNTK.Variable, CNTK.Value>() { { features, data[features_stream_info].data } };
    var output_d = new Dictionary<CNTK.Variable, CNTK.Value>() { { conv_base.Output, null } };
    conv_base.Evaluate(input_d, output_d, computeDevice);

    var minibatch_extracted_features = output_d[conv_base.Output].GetDenseData<float>(conv_base.Output);
    for (int i = 0; i < data[features_stream_info].numberOfSamples; i++) {
      extracted_features[pos + i] = minibatch_extracted_features[i].ToArray();
      extracted_labels[pos + i] = new float[2];
      extracted_labels[pos + i][i % 2] = 1;
    }
    pos = pos_end;
  }

  return Tuple.Create(extracted_features, extracted_labels);
}
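// Not part of the original listing: a minimal sketch of the "fast feature extraction"
// follow-up, training a small dense classifier directly on the arrays returned by
// extract_features. The method name and the epoch/batch handling are illustrative;
// Util.Dense, max_epochs, batch_size, and computeDevice are assumed to be the same
// helpers/fields used elsewhere in this file, and the flattened VGG16 output length
// (expected 4 * 4 * 512 for 150x150 inputs) is taken from the first extracted sample.
void train_on_extracted_features_sketch() {
  var train_data = extract_features(0, 1000, "train");
  var extracted_feature_length = train_data.Item1[0].Length;

  var features = CNTK.Variable.InputVariable(new int[] { extracted_feature_length }, CNTK.DataType.Float, "extracted_features");
  var labels = CNTK.Variable.InputVariable(new int[] { 2 }, CNTK.DataType.Float, "labels");
  var model = Util.Dense(features, 256, computeDevice);
  model = CNTK.CNTKLib.ReLU(model);
  model = CNTK.CNTKLib.Dropout(model, 0.5);
  model = Util.Dense(model, 2, computeDevice);

  var loss_function = CNTK.CNTKLib.CrossEntropyWithSoftmax(model.Output, labels);
  var accuracy_function = CNTK.CNTKLib.ClassificationError(model.Output, labels);
  var pv = new CNTK.ParameterVector((System.Collections.ICollection)model.Parameters());
  var learner = CNTK.CNTKLib.AdamLearner(pv,
    new CNTK.TrainingParameterScheduleDouble(0.0001, 1),
    new CNTK.TrainingParameterScheduleDouble(0.99, 1));
  var trainer = CNTK.Trainer.CreateTrainer(model, loss_function, accuracy_function, new CNTK.Learner[] { learner });

  for (int epoch = 0; epoch < max_epochs; epoch++) {
    for (int pos = 0; pos < train_data.Item1.Length; pos += batch_size) {
      var pos_end = Math.Min(pos + batch_size, train_data.Item1.Length);
      // flatten this minibatch of pre-extracted features/labels into contiguous buffers
      var feature_batch = train_data.Item1.Skip(pos).Take(pos_end - pos).SelectMany(v => v).ToArray();
      var label_batch = train_data.Item2.Skip(pos).Take(pos_end - pos).SelectMany(v => v).ToArray();
      var batch_features = CNTK.Value.CreateBatch(features.Shape, feature_batch, computeDevice);
      var batch_labels = CNTK.Value.CreateBatch(labels.Shape, label_batch, computeDevice);
      var feed_dictionary = new Dictionary<CNTK.Variable, CNTK.Value>() { { features, batch_features }, { labels, batch_labels } };
      trainer.TrainMinibatch(feed_dictionary, false, computeDevice);
    }
    Console.WriteLine($"Epoch {epoch + 1:D2}/{max_epochs}, training_accuracy={1.0 - trainer.PreviousMinibatchEvaluationAverage():F3}");
  }
}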
List<List<double>> train_with_augmentation(bool use_finetuning) {
  var labels = CNTK.Variable.InputVariable(new int[] { 2 }, CNTK.DataType.Float, "labels");
  var features = CNTK.Variable.InputVariable(new int[] { 150, 150, 3 }, CNTK.DataType.Float, "features");
  var scalar_factor = CNTK.Constant.Scalar<float>((float)(1.0 / 255.0), computeDevice);
  var scaled_features = CNTK.CNTKLib.ElementTimes(scalar_factor, features);
  var conv_base = VGG16.get_model(scaled_features, computeDevice, use_finetuning);
  var model = Util.Dense(conv_base, 256, computeDevice);
  model = CNTK.CNTKLib.ReLU(model);
  model = CNTK.CNTKLib.Dropout(model, 0.5);
  model = Util.Dense(model, 2, computeDevice);

  var loss_function = CNTK.CNTKLib.CrossEntropyWithSoftmax(model.Output, labels);
  var accuracy_function = CNTK.CNTKLib.ClassificationError(model.Output, labels);

  var pv = new CNTK.ParameterVector((System.Collections.ICollection)model.Parameters());
  var learner = CNTK.CNTKLib.AdamLearner(pv,
    new CNTK.TrainingParameterScheduleDouble(0.0001, 1),
    new CNTK.TrainingParameterScheduleDouble(0.99, 1));
  var trainer = CNTK.Trainer.CreateTrainer(model, loss_function, accuracy_function, new CNTK.Learner[] { learner });
  var evaluator = CNTK.CNTKLib.CreateEvaluator(accuracy_function);

  var train_minibatch_source = create_minibatch_source(features.Shape, 0, 1000, "train", is_training: true, use_augmentations: true);
  var validation_minibatch_source = create_minibatch_source(features.Shape, 1000, 500, "validation", is_training: false, use_augmentations: false);
  var train_featuresStreamInformation = train_minibatch_source.StreamInfo("features");
  var train_labelsStreamInformation = train_minibatch_source.StreamInfo("labels");
  var validation_featuresStreamInformation = validation_minibatch_source.StreamInfo("features");
  var validation_labelsStreamInformation = validation_minibatch_source.StreamInfo("labels");

  var training_accuracy = new List<double>();
  var validation_accuracy = new List<double>();
  for (int epoch = 0; epoch < max_epochs; epoch++) {
    var startTime = DateTime.Now;

    // training phase
    var epoch_training_error = 0.0;
    var pos = 0;
    var num_batches = 0;
    while (pos < 2000) {
      var pos_end = Math.Min(pos + batch_size, 2000);
      var minibatch_data = train_minibatch_source.GetNextMinibatch((uint)(pos_end - pos), computeDevice);
      var feed_dictionary = new batch_t() {
        { features, minibatch_data[train_featuresStreamInformation] },
        { labels, minibatch_data[train_labelsStreamInformation] }
      };
      trainer.TrainMinibatch(feed_dictionary, computeDevice);
      epoch_training_error += trainer.PreviousMinibatchEvaluationAverage();
      num_batches++;
      pos = pos_end;
    }
    epoch_training_error /= num_batches;
    training_accuracy.Add(1.0 - epoch_training_error);

    // evaluation phase
    var epoch_validation_error = 0.0;
    num_batches = 0;
    pos = 0;
    while (pos < 1000) {
      var pos_end = Math.Min(pos + batch_size, 1000);
      var minibatch_data = validation_minibatch_source.GetNextMinibatch((uint)(pos_end - pos), computeDevice);
      var feed_dictionary = new CNTK.UnorderedMapVariableMinibatchData() {
        { features, minibatch_data[validation_featuresStreamInformation] },
        { labels, minibatch_data[validation_labelsStreamInformation] }
      };
      epoch_validation_error += evaluator.TestMinibatch(feed_dictionary);
      pos = pos_end;
      num_batches++;
    }
    epoch_validation_error /= num_batches;
    validation_accuracy.Add(1.0 - epoch_validation_error);

    var elapsedTime = DateTime.Now.Subtract(startTime);
    Console.WriteLine($"Epoch {epoch + 1:D2}/{max_epochs}, training_accuracy={1.0 - epoch_training_error:F3}, validation accuracy:{1 - epoch_validation_error:F3}, elapsed time={elapsedTime.TotalSeconds:F1} seconds");
    if (epoch_training_error < 0.001) { break; }
  }

  return new List<List<double>>() { training_accuracy, validation_accuracy };
}
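// Not part of the original listing: a minimal usage sketch that runs training with
// augmentation and prints the per-epoch accuracy curves returned above. The method name
// print_training_curves_sketch is hypothetical.
void print_training_curves_sketch() {
  computeDevice = Util.get_compute_device();
  var curves = train_with_augmentation(use_finetuning: true);
  var training_accuracy = curves[0];
  var validation_accuracy = curves[1];
  for (int epoch = 0; epoch < training_accuracy.Count; epoch++) {
    Console.WriteLine($"Epoch {epoch + 1:D2}: train={training_accuracy[epoch]:F3}, validation={validation_accuracy[epoch]:F3}");
  }
}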