Example #1
        void create_model(ref CNTK.Function model, ref float[][] labels)
        {
            var target_image          = preprocess_image(target_image_path, img_height, img_width);
            var style_reference_image = preprocess_image(style_reference_image_path, img_height, img_width);
            var base_model            = create_base_content_and_styles_model(img_height, img_width);

            labels = compute_labels(base_model, target_image, style_reference_image);

            var dream_weights_init = new CNTK.NDArrayView(new int[] { img_width, img_height, 3 }, target_image, computeDevice);
            var dream_weights      = new CNTK.Parameter(dream_weights_init, "the_dream");
            var dummy_features     = CNTK.Variable.InputVariable(new int[] { 1 }, CNTK.DataType.Float, "dummy_features");
            var dream_layer        = CNTK.CNTKLib.ElementTimes(dream_weights, dummy_features, "the_dream_layer");

            var replacements = new Dictionary <CNTK.Variable, CNTK.Variable>()
            {
                { base_model.Arguments[0], dream_layer.Output }
            };

            model = base_model.Clone(CNTK.ParameterCloningMethod.Freeze, replacements);

            var all_outputs = new List <CNTK.Variable>()
            {
                dream_layer
            };

            all_outputs.AddRange(model.Outputs);
            model = CNTK.Function.Combine(all_outputs, name: "overall_model");
        }
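A minimal calling sketch for the method above; both arguments are assigned inside create_model, and the fields it reads (target_image_path, img_height, img_width, computeDevice) are assumed to belong to the surrounding class, which is not shown here:

            // hypothetical call site: create_model fills in both ref arguments
            CNTK.Function model = null;
            float[][] labels = null;
            create_model(ref model, ref labels);
            Console.WriteLine($"model outputs: {model.Outputs.Count}, label sets: {labels.Length}");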
Example #2
        /// <summary>
        /// Use the generator to create a list of fake images.
        /// </summary>
        /// <param name="generator">The generator to use.</param>
        /// <param name="batchSize">The batch size.</param>
        /// <param name="latentDimensions">The number of dimensions in the latent input vector.</param>
        /// <returns>A list of images created by the generator.</returns>
        public static IList <IList <float> > GenerateImages(
            CNTK.Function generator,
            int batchSize,
            int latentDimensions)
        {
            // set up a Gaussian random number generator
            var random         = new Random();
            var gaussianRandom = new GaussianRandom(random);

            // set up randomized input for the generator
            var random_latent_vectors    = gaussianRandom.getFloatSamples(batchSize * latentDimensions);
            var random_latent_vectors_nd = new CNTK.NDArrayView(new int[] { latentDimensions, 1, batchSize }, random_latent_vectors, NetUtil.CurrentDevice);
            var generator_inputs         = new Dictionary <CNTK.Variable, CNTK.Value>()
            {
                { generator.Arguments[0], new CNTK.Value(random_latent_vectors_nd) }
            };
            var generator_outputs = new Dictionary <CNTK.Variable, CNTK.Value>()
            {
                { generator.Output, null }
            };

            // run the generator and collect the images
            generator.Evaluate(generator_inputs, generator_outputs, NetUtil.CurrentDevice);
            return(generator_outputs[generator.Output].GetDenseData <float>(generator.Output));
        }
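A minimal usage sketch, assuming a trained generator has been saved to disk (the file name generator.model is hypothetical) and that GenerateImages is in scope, so the class qualifier is omitted:

            // load a trained generator and sample 8 fake images from 32-dimensional latent vectors
            var generator = CNTK.Function.Load("generator.model", NetUtil.CurrentDevice);
            var fakeImages = GenerateImages(generator, batchSize: 8, latentDimensions: 32);
            Console.WriteLine($"generated {fakeImages.Count} images of {fakeImages[0].Count} floats each");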
Example #3
        /// <summary>
        /// Adds a dream layer to a neural network.
        /// </summary>
        /// <param name="input">The neural network to extend.</param>
        /// <param name="image">The content image.</param>
        /// <param name="width">The width of the content image.</param>
        /// <param name="height">The height of the content image.</param>
        /// <returns>The neural network extended with a dream layer.</returns>
        public static CNTK.Function DreamLayer(
            this CNTK.Function input,
            float[] image,
            int width,
            int height)
        {
            // set up the dream layer
            var dream_weights_init = new CNTK.NDArrayView(new int[] { width, height, 3 }, image, NetUtil.CurrentDevice);
            var dream_weights      = new CNTK.Parameter(dream_weights_init, "the_dream");
            var dummy_features     = CNTK.Variable.InputVariable(new int[] { 1 }, CNTK.DataType.Float, "dummy_features");
            var dream_layer        = CNTK.CNTKLib.ElementTimes(dream_weights, dummy_features, "the_dream_layer");

            // combine the dream layer with the content and style layers
            var replacements = new Dictionary <CNTK.Variable, CNTK.Variable>()
            {
                { input.Arguments[0], dream_layer.Output }
            };
            var model = input.Clone(CNTK.ParameterCloningMethod.Freeze, replacements);

            // return the finished model
            var all_outputs = new List <CNTK.Variable>()
            {
                dream_layer
            };

            all_outputs.AddRange(model.Outputs);
            return(CNTK.Function.Combine(all_outputs, name: "overall_model"));
        }
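A minimal usage sketch for this extension method, assuming a frozen content-and-style base network and a preprocessed content image flattened to a float array; the variable names and image size below are illustrative only:

            // extend the frozen base network with a trainable dream layer sized to the content image
            var model = baseModel.DreamLayer(contentImageFloats, width: 400, height: 300);
            // output 0 is the dream layer itself; the remaining outputs come from the base network
            Console.WriteLine($"combined model has {model.Outputs.Count} outputs");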
Example #4
        CNTK.Value create_x_minibatch(bool sequence_mode, CNTK.Variable x, GeneratorsInfo gi, SamplesTargets st, CNTK.DeviceDescriptor computeDevice)
        {
            if (sequence_mode == false)
            {
                return(CNTK.Value.CreateBatch(x.Shape, st.samples, computeDevice));
            }
            var sequence_length = gi.lookback / gi.step;
            var minibatch_size  = st.samples.Length / sequence_length / gi.num_records;
            var x_shape         = CNTK.NDShape.CreateNDShape(new int[] { gi.num_records, sequence_length, minibatch_size });
            var ndArrayView     = new CNTK.NDArrayView(x_shape, st.samples, computeDevice, readOnly: true);

            return(new CNTK.Value(ndArrayView));
        }
        static CNTK.NDArrayView[] get_minibatch_data_CPU(CNTK.NDShape shape, float[][] src, int indices_begin, int indices_end)
        {
            var num_indices = indices_end - indices_begin;
            var result      = new CNTK.NDArrayView[num_indices];

            var row_index = 0;

            for (var index = indices_begin; index != indices_end; index++)
            {
                var dataBuffer  = src[index];
                var ndArrayView = new CNTK.NDArrayView(shape, dataBuffer, CNTK.DeviceDescriptor.CPUDevice, true);
                result[row_index++] = ndArrayView;
            }
            return(result);
        }
        void generateImages()
        {
            if (model == null)
            {
                System.Diagnostics.Debug.Assert(System.IO.File.Exists(model_filename));
                model = CNTK.Function.Load(model_filename, computeDevice);
            }

            var node_z        = model.FindByName("input_z");
            var decoder_start = Util.find_function_with_input(model, node_z);
            var z_sample_var  = CNTK.Variable.InputVariable(node_z.Output.Shape, CNTK.DataType.Float, "z_sample");
            var replacements  = new Dictionary <CNTK.Variable, CNTK.Variable>()
            {
                { node_z, z_sample_var }
            };
            var decoder = model.Clone(CNTK.ParameterCloningMethod.Freeze, replacements);

            var n                     = 15; // figure with 15x15 digits
            var xy_buffer             = new float[n * n * 2];
            var sample_start          = -2f;
            var sample_interval_width = 4f;

            for (int i = 0, pos = 0; i < n; i++)
            {
                for (int j = 0; j < n; j++)
                {
                    xy_buffer[pos++] = sample_start + (sample_interval_width / (n - 1)) * i;
                    xy_buffer[pos++] = sample_start + (sample_interval_width / (n - 1)) * j;
                }
            }

            var ndArrayView = new CNTK.NDArrayView(new int[] { 2, 1, xy_buffer.Length / 2 }, xy_buffer, computeDevice, true);
            var value       = new CNTK.Value(ndArrayView);
            var inputs_dir  = new Dictionary <CNTK.Variable, CNTK.Value>()
            {
                { z_sample_var, value }
            };
            var outputs_dir = new Dictionary <CNTK.Variable, CNTK.Value>()
            {
                { decoder.Output, null }
            };

            decoder.Evaluate(inputs_dir, outputs_dir, computeDevice);
            var values = outputs_dir[decoder.Output].GetDenseData <float>(decoder.Output);

            plotImages(values);
        }
Example #7
        void for_debugging()
        {
#if false
            var shape       = data[features_stream_info].data.Shape;
            var numElements = shape.TotalSize;
            var buffer      = new float[numElements];
            var buffer_cpu  = new CNTK.NDArrayView(shape, buffer, CNTK.DeviceDescriptor.CPUDevice);
            buffer_cpu.CopyFrom(data[features_stream_info].data.Data);
            var firstImage = new float[3 * 150 * 150];
            System.Array.Copy(buffer, firstImage.Length, firstImage, 0, firstImage.Length);
            var wpfApp = new System.Windows.Application();
            wpfApp.Run(new PlotWindow(firstImage));
            var mel    = new float[40];
            var nd_cpu = new CNTK.NDArrayView(CNTK.NDShape.CreateNDShape(new int[] { 2, 1, (int)data[labels_stream_info].numberOfSamples }), mel, CNTK.DeviceDescriptor.CPUDevice);
            nd_cpu.CopyFrom(data[labels_stream_info].data.Data);
#endif
        }
Example #8
        /// <summary>
        /// Get a batch from the given variable.
        /// </summary>
        /// <param name="variable">The variable to use.</param>
        /// <param name="source">The variable data.</param>
        /// <param name="begin">The index of the first value to use.</param>
        /// <param name="end">The index of the last value to use.</param>
        /// <returns>A batch of values taken from the given variable.</returns>
        public static CNTK.Value GetBatch(
            this CNTK.Variable variable,
            float[][] source,
            int begin,
            int end)
        {
            var num_indices = end - begin;
            var result      = new CNTK.NDArrayView[num_indices];
            var row_index   = 0;

            for (var index = begin; index != end; index++)
            {
                var dataBuffer  = source[index];
                var ndArrayView = new CNTK.NDArrayView(variable.Shape, dataBuffer, CNTK.DeviceDescriptor.CPUDevice, true);
                result[row_index++] = ndArrayView;
            }
            return(CNTK.Value.Create(variable.Shape, result, NetUtil.CurrentDevice, true));
        }
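A minimal usage sketch, assuming trainingImages is a float[][] in which every row holds one flattened 150x150x3 image (both names and sizes are illustrative):

            // pack rows 0..31 of the training set into one minibatch for the features input
            var features = CNTK.Variable.InputVariable(new int[] { 150, 150, 3 }, CNTK.DataType.Float, "features");
            var featureBatch = features.GetBatch(trainingImages, 0, 32);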
Example #9
        public static (CNTK.Value featureBatch, CNTK.Value labelBatch) GetMisleadingBatch(
            CNTK.Function gan,
            int batchSize,
            int latentDimensions)
        {
            // set up a Gaussian random number generator
            var random         = new Random();
            var gaussianRandom = new GaussianRandom(random);

            // prepare a batch to fool the discriminator: we generate fake images
            // but we label them as real with label=0
            var random_latent_vectors    = gaussianRandom.getFloatSamples(batchSize * latentDimensions);
            var misleading_targets       = new float[batchSize];
            var random_latent_vectors_nd = new CNTK.NDArrayView(new int[] { latentDimensions, 1, batchSize }, random_latent_vectors, NetUtil.CurrentDevice);

            // return results
            return(
                new CNTK.Value(random_latent_vectors_nd),
                CNTK.Value.CreateBatch(new CNTK.NDShape(0), misleading_targets, NetUtil.CurrentDevice, true)
                );
        }
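A minimal usage sketch, assuming a combined GAN function named gan and a 32-dimensional latent space (both are illustrative); the returned batches would be fed to the adversarial trainer as in Example #14:

            // random latent vectors labelled as "real" (label=0) to mislead the discriminator
            var (featureBatch, labelBatch) = GetMisleadingBatch(gan, batchSize: 20, latentDimensions: 32);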
Example #10
        float[][] compute_labels(CNTK.Function model, float[] target_image, float[] style_reference_image)
        {
            var input_shape = model.Arguments[0].Shape.Dimensions.ToArray();

            System.Diagnostics.Debug.Assert(input_shape[0] * input_shape[1] * input_shape[2] == target_image.Length);
            System.Diagnostics.Debug.Assert(target_image.Length == style_reference_image.Length);

#if false
            var cpuDevice                = CNTK.DeviceDescriptor.CPUDevice;
            var target_image_nd          = new CNTK.NDArrayView(input_shape, target_image, cpuDevice, readOnly: true);
            var style_reference_image_nd = new CNTK.NDArrayView(input_shape, style_reference_image, cpuDevice, readOnly: true);
            var batch_nd = new CNTK.NDArrayView[] { target_image_nd, style_reference_image_nd };
            var batch    = CNTK.Value.Create(input_shape, batch_nd, computeDevice, readOnly: true);
#else
            var batch_buffer = new float[2 * target_image.Length];
            Array.Copy(target_image, 0, batch_buffer, 0, target_image.Length);
            Array.Copy(style_reference_image, 0, batch_buffer, target_image.Length, target_image.Length);
            var batch_nd = new CNTK.NDArrayView(new int[] { model.Arguments[0].Shape[0], model.Arguments[0].Shape[1], model.Arguments[0].Shape[2], 1, 2 }, batch_buffer, computeDevice);
            var batch    = new CNTK.Value(batch_nd);
#endif
            var inputs = new Dictionary <CNTK.Variable, CNTK.Value>()
            {
                { model.Arguments[0], batch }
            };
            var outputs = new Dictionary <CNTK.Variable, CNTK.Value>();
            foreach (var output in model.Outputs)
            {
                outputs.Add(output, null);
            }
            model.Evaluate(inputs, outputs, computeDevice);

            float[][] labels = new float[model.Outputs.Count][];
            labels[0] = outputs[model.Outputs[0]].GetDenseData <float>(model.Outputs[0])[0].ToArray();
            for (int i = 1; i < labels.Length; i++)
            {
                labels[i] = outputs[model.Outputs[i]].GetDenseData <float>(model.Outputs[i])[1].ToArray();
            }

            return(labels);
        }
Example #11
        Dictionary <CNTK.Variable, CNTK.Value> create_batch(CNTK.Function model, float[][] labels)
        {
            var dict_inputs = new Dictionary <CNTK.Variable, CNTK.Value>();

            for (int i = 0; i < model.Arguments.Count; i++)
            {
                var loss_input_variable = model.Arguments[i];
                if (loss_input_variable.Name == "dummy_features")
                {
                    var dummy_scalar_buffer = new float[] { 1 };
                    var dummy_scalar_nd     = new CNTK.NDArrayView(new int[] { 1 }, dummy_scalar_buffer, computeDevice, readOnly: true);
                    dict_inputs[loss_input_variable] = new CNTK.Value(dummy_scalar_nd);
                }
                else
                {
                    var cs_index = Int32.Parse(loss_input_variable.Name.Substring("content_and_style_".Length));
                    var nd       = new CNTK.NDArrayView(loss_input_variable.Shape, labels[cs_index], computeDevice, readOnly: true);
                    dict_inputs[loss_input_variable] = new CNTK.Value(nd);
                }
            }
            return(dict_inputs);
        }
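A minimal calling sketch that combines this helper with compute_labels from Example #10; base_model, model and the two image buffers are assumed to come from the surrounding class, as in Example #1:

            // evaluate the frozen base model once to get the target activations,
            // then build the input dictionary used at every training step
            var labels = compute_labels(base_model, target_image, style_reference_image);
            var dict_inputs = create_batch(model, labels);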
Example #12
        /// <summary>
        /// Calculate the output labels for style transfer.
        /// </summary>
        /// <param name="model">The neural network to use.</param>
        /// <param name="contentImage">The content image to use.</param>
        /// <param name="styleImage">The style image to use.</param>
        /// <returns>The computed label arrays, one per model output.</returns>
        public static float[][] CalculateLabels(CNTK.Function model, float[] contentImage, float[] styleImage)
        {
            // make sure the content image dimensions match the neural network input size
            // make sure the content and style images are the same size
            var input_shape = model.Arguments[0].Shape.Dimensions.ToArray();

            System.Diagnostics.Debug.Assert(input_shape[0] * input_shape[1] * input_shape[2] == contentImage.Length);
            System.Diagnostics.Debug.Assert(contentImage.Length == styleImage.Length);

            // set up a batch with the content and the style image
            var batch_buffer = new float[2 * contentImage.Length];

            Array.Copy(contentImage, 0, batch_buffer, 0, contentImage.Length);
            Array.Copy(styleImage, 0, batch_buffer, contentImage.Length, contentImage.Length);
            var batch_nd = new CNTK.NDArrayView(new int[] { model.Arguments[0].Shape[0], model.Arguments[0].Shape[1], model.Arguments[0].Shape[2], 1, 2 }, batch_buffer, NetUtil.CurrentDevice);
            var batch    = new CNTK.Value(batch_nd);

            // let the model evaluate the batch
            var inputs = new Dictionary <CNTK.Variable, CNTK.Value>()
            {
                { model.Arguments[0], batch }
            };
            var outputs = new Dictionary <CNTK.Variable, CNTK.Value>();

            foreach (var output in model.Outputs)
            {
                outputs.Add(output, null);
            }
            model.Evaluate(inputs, outputs, NetUtil.CurrentDevice);

            // collect and return the model outputs
            float[][] labels = new float[model.Outputs.Count][];
            labels[0] = outputs[model.Outputs[0]].GetDenseData <float>(model.Outputs[0])[0].ToArray();
            for (int i = 1; i < labels.Length; i++)
            {
                labels[i] = outputs[model.Outputs[i]].GetDenseData <float>(model.Outputs[i])[1].ToArray();
            }
            return(labels);
        }
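A minimal usage sketch, assuming a content-and-style model whose input dimensions match the two preprocessed images; all variable names below are illustrative:

            // run the frozen network once over the content and style images to obtain
            // the target activations used later by the style-transfer loss
            var labels = CalculateLabels(styleTransferBase, contentImageFloats, styleImageFloats);
            Console.WriteLine($"captured {labels.Length} label arrays; the first holds {labels[0].Length} values");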
Example #13
        /// <summary>
        /// Get a sequence batch from the given variable.
        /// </summary>
        /// <param name="variable">The variable to use.</param>
        /// <param name="sequenceLength">The number of time periods in the data sequence.</param>
        /// <param name="source">The variable data.</param>
        /// <param name="begin">The index of the first value to use.</param>
        /// <param name="end">The index of the last value to use.</param>
        /// <returns>A batch of values taken from the given variable.</returns>
        public static CNTK.Value GetSequenceBatch(
            this CNTK.Variable variable,
            int sequenceLength,
            float[][] source,
            int begin,
            int end)
        {
            System.Diagnostics.Debug.Assert((variable.Shape.Dimensions.Count == 0) || ((variable.Shape.Dimensions.Count == 1) && (variable.Shape.Dimensions[0] == 1)));
            System.Diagnostics.Debug.Assert(source[0].Length == sequenceLength);
            var num_indices = end - begin;
            var cpu_blob    = new float[num_indices * sequenceLength];
            var row_index   = 0;

            for (var index = begin; index != end; index++)
            {
                System.Buffer.BlockCopy(source[index], 0, cpu_blob, row_index * sequenceLength * sizeof(float), sequenceLength * sizeof(float));
                row_index++;
            }
            var blob_shape  = variable.Shape.AppendShape(new int[] { sequenceLength, end - begin });
            var ndArrayView = new CNTK.NDArrayView(blob_shape, cpu_blob, NetUtil.CurrentDevice);

            return(new CNTK.Value(ndArrayView));
        }
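A minimal usage sketch, assuming sequenceData is a float[][] in which every row holds exactly 20 time steps of a scalar series (names and sizes are illustrative):

            // pack rows 0..15 of the time series into one sequence batch for a scalar input variable
            var input = CNTK.Variable.InputVariable(new int[] { 1 }, CNTK.DataType.Float, "sequence_input");
            var sequenceBatch = input.GetSequenceBatch(20, sequenceData, 0, 16);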
Example #14
        void train()
        {
            var random         = new Random();
            var gaussianRandom = new FromStackOverflow.GaussianRandom(random);

            create_gan();
            load_data();
            var iterations = 100000;
            var batch_size = 20;
            var save_dir   = "images";

            System.IO.Directory.CreateDirectory(save_dir);
            var start = 0;

            for (int step = 0; step < iterations; step++)
            {
                // use the generator to generate fake images
                var random_latent_vectors    = gaussianRandom.getFloatSamples(batch_size * latent_dim);
                var random_latent_vectors_nd = new CNTK.NDArrayView(new int[] { latent_dim, 1, batch_size }, random_latent_vectors, computeDevice);
                var generator_inputs         = new Dictionary <CNTK.Variable, CNTK.Value>()
                {
                    { generator.Arguments[0], new CNTK.Value(random_latent_vectors_nd) }
                };
                var generator_outputs = new Dictionary <CNTK.Variable, CNTK.Value>()
                {
                    { generator.Output, null }
                };
                generator.Evaluate(generator_inputs, generator_outputs, computeDevice);
                var generated_images = generator_outputs[generator.Output].GetDenseData <float>(generator.Output);

                // train the discriminator: the first half of the mini-batch holds the fake images (label=1 plus noise)
                // and the second half holds the real images (label=0 plus noise)
                var combined_images = new float[2 * batch_size][];
                var labels          = new float[2 * batch_size];
                start = Math.Min(start, x_train.Length - batch_size);
                for (int i = 0; i < batch_size; i++)
                {
                    combined_images[i] = generated_images[i].ToArray();
                    labels[i]          = (float)(1 + 0.05 * gaussianRandom.NextGaussian());

                    combined_images[i + batch_size] = x_train[start + i];
                    labels[i + batch_size]          = (float)(0.05 * gaussianRandom.NextGaussian());
                }
                start += batch_size;
                if (start >= x_train.Length)
                {
                    start = 0;
                }

                var combined_images_minibatch = Util.get_tensors(new int[] { width, height, channels }, combined_images, 0, combined_images.Length, computeDevice);
                var labels_minibatch          = CNTK.Value.CreateBatch(new CNTK.NDShape(0), labels, computeDevice, true);
                var discriminator_minibatch   = new Dictionary <CNTK.Variable, CNTK.Value>()
                {
                    { discriminator.Arguments[0], combined_images_minibatch },
                    { label_var, labels_minibatch }
                };
                discriminator_trainer.TrainMinibatch(discriminator_minibatch, true, computeDevice);
                var d_loss = discriminator_trainer.PreviousMinibatchLossAverage();

                // train the gan: the generator tries to fool the discriminator, so we generate fake
                // images but label them as "real" (label=0)
                random_latent_vectors = gaussianRandom.getFloatSamples(batch_size * latent_dim);
                var misleading_targets = new float[batch_size];
                random_latent_vectors_nd = new CNTK.NDArrayView(new int[] { latent_dim, 1, batch_size }, random_latent_vectors, computeDevice);
                var gan_inputs = new Dictionary <CNTK.Variable, CNTK.Value>()
                {
                    { gan.Arguments[0], new CNTK.Value(random_latent_vectors_nd) },
                    { label_var, CNTK.Value.CreateBatch(new CNTK.NDShape(0), misleading_targets, computeDevice, true) }
                };
                gan_trainer.TrainMinibatch(gan_inputs, true, computeDevice);
                var g_loss = gan_trainer.PreviousMinibatchLossAverage();

                if (step % 100 == 0)
                {
                    Console.WriteLine($"discriminator loss at step {step}: {d_loss:F3}");
                    Console.WriteLine($"adversarial loss at step {step}: {g_loss:F3}");

                    // Save one generated image
                    var img            = generated_images[0].ToArray();
                    var img_bytes      = Util.convert_from_channels_first(img, scaling: 255, invertOrder: true);
                    var mat            = new OpenCvSharp.Mat(height, width, OpenCvSharp.MatType.CV_8UC3, img_bytes, 3 * width);
                    var image_filename = $"generated_frog_{step}.png";
                    var image_path     = System.IO.Path.Combine(save_dir, image_filename);
                    mat.SaveImage(image_path);
                    mat.Dispose(); mat = null;

                    // Save one real image for comparison
                    img            = x_train[Math.Max(start - batch_size, 0)];
                    img_bytes      = Util.convert_from_channels_first(img, scaling: 255, invertOrder: true);
                    mat            = new OpenCvSharp.Mat(height, width, OpenCvSharp.MatType.CV_8UC3, img_bytes, 3 * width);
                    image_filename = $"real_frog_{step}.png";
                    image_path     = System.IO.Path.Combine(save_dir, image_filename);
                    mat.SaveImage(image_path);
                    mat.Dispose(); mat = null;
                }
            }
        }