/// <summary>
/// Builds the DCGAN discriminator: a stack of stride-2 Conv2D blocks that
/// repeatedly halves the spatial resolution of the input image, followed by a
/// Flatten and a single sigmoid unit producing a real/fake score.
/// NOTE(review): the first block applies LeakyReLU before BatchNormalization,
/// while all later blocks apply BatchNormalization before LeakyReLU — possibly
/// unintentional; confirm against the reference implementation before reordering.
/// </summary>
private Model Make_Discriminator_model() {
    // A null activation keeps the Conv2D layers linear; the non-linearity is
    // supplied by the separate LeakyReLU layers below.
    Tensorflow.Keras.Activation act = null;

    var inputs = keras.Input(img_shape);

    // Block 1: 128 filters, stride 2 halves the spatial resolution.
    var features = keras.layers.Conv2D(128, kernel_size: 3, strides: (2, 2), padding: "same", activation: act).Apply(inputs);
    features = keras.layers.LeakyReLU(LeakyReLU_alpha).Apply(features);
    features = keras.layers.BatchNormalization(momentum: 0.8f).Apply(features);

    // Block 2: 256 filters.
    features = keras.layers.Conv2D(256, 3, (2, 2), "same", activation: act).Apply(features);
    features = keras.layers.BatchNormalization(momentum: 0.8f).Apply(features);
    features = keras.layers.LeakyReLU(LeakyReLU_alpha).Apply(features);

    // Block 3: 512 filters.
    features = keras.layers.Conv2D(512, 3, (2, 2), "same", activation: act).Apply(features);
    features = keras.layers.BatchNormalization(momentum: 0.8f).Apply(features);
    features = keras.layers.LeakyReLU(LeakyReLU_alpha).Apply(features);

    // Block 4: 1024 filters.
    features = keras.layers.Conv2D(1024, 3, (2, 2), "same", activation: act).Apply(features);
    features = keras.layers.BatchNormalization(momentum: 0.8f).Apply(features);
    features = keras.layers.LeakyReLU(LeakyReLU_alpha).Apply(features);

    // Classifier head: flatten, then one sigmoid unit (real/fake probability).
    features = keras.layers.Flatten().Apply(features);
    features = keras.layers.Dense(1, activation: "sigmoid").Apply(features);

    var discriminator = keras.Model(inputs, features);
    discriminator.summary();
    return discriminator;
}
/// <summary>
/// Builds the DCGAN generator: projects a 100-dim latent vector to a
/// quarter-resolution feature volume, then upsamples twice with Conv2D
/// refinement, ending in a single-channel tanh output (pixels in [-1, 1]).
///
/// Fix: the Reshape target was hard-coded to (7, 7, 256) while the Dense
/// layer is sized from img_rows / 4 * img_cols / 4 * 256. The reshape now
/// uses (img_rows / 4, img_cols / 4, 256) so the two stay consistent for
/// image sizes other than 28x28; behavior is unchanged for the original
/// 28x28 case, where both expressions yield (7, 7, 256).
/// </summary>
private Model Make_Generator_model() {
    // A null activation keeps Dense/Conv2D linear; the non-linearity is
    // supplied by the separate LeakyReLU layers below.
    Tensorflow.Keras.Activation activation = null;

    var model = keras.Sequential();

    // Project the latent vector (length 100) to a flat buffer that reshapes
    // into a (img_rows/4, img_cols/4, 256) volume.
    model.add(keras.layers.Dense(img_rows / 4 * img_cols / 4 * 256, activation: activation, input_shape: 100));
    model.add(keras.layers.BatchNormalization(momentum: 0.8f));
    model.add(keras.layers.LeakyReLU(LeakyReLU_alpha));
    model.add(keras.layers.Reshape((img_rows / 4, img_cols / 4, 256)));

    // Upsample x2 (-> half resolution), refine with 128 filters.
    model.add(keras.layers.UpSampling2D());
    model.add(keras.layers.Conv2D(128, 3, 1, padding: "same", activation: activation));
    model.add(keras.layers.BatchNormalization(momentum: 0.8f));
    model.add(keras.layers.LeakyReLU(LeakyReLU_alpha));

    // Upsample x2 (-> full resolution), refine with 64 filters.
    model.add(keras.layers.UpSampling2D());
    model.add(keras.layers.Conv2D(64, 3, 1, padding: "same", activation: activation));
    model.add(keras.layers.BatchNormalization(momentum: 0.8f));
    model.add(keras.layers.LeakyReLU(LeakyReLU_alpha));

    // Extra refinement block at full resolution with 32 filters.
    model.add(keras.layers.Conv2D(32, 3, 1, padding: "same", activation: activation));
    model.add(keras.layers.BatchNormalization(momentum: 0.8f));
    model.add(keras.layers.LeakyReLU(LeakyReLU_alpha));

    // Output layer: single channel, tanh keeps pixel values in [-1, 1].
    model.add(keras.layers.Conv2D(1, 3, 1, padding: "same", activation: "tanh"));

    model.summary();
    return model;
}