Example #1
0
        /// <summary>
        /// Initializes a new instance of the <see cref="Dense"/> class.
        /// </summary>
        ///
        /// <param name="units">Positive integer, dimensionality of the output space.</param>
        /// <param name="activation">The activation function to use. If null, no activation is applied.</param>
        /// <param name="use_bias">Whether the layer uses a bias vector.</param>
        /// <param name="kernel_initializer">Initializer for the kernel weights matrix. Defaults to <see cref="GlorotUniform"/> when null.</param>
        /// <param name="bias_initializer">Initializer for the bias vector. Defaults to <see cref="Zeros"/> when null.</param>
        /// <param name="kernel_regularizer">Regularizer applied to the kernel weights matrix.</param>
        /// <param name="bias_regularizer">Regularizer applied to the bias vector.</param>
        /// <param name="activity_regularizer">Regularizer applied to the output of the layer.</param>
        /// <param name="kernel_constraint">Constraint applied to the kernel weights matrix.</param>
        /// <param name="bias_constraint">Constraint applied to the bias vector.</param>
        /// <param name="input_dim">The input dim.</param>
        /// <param name="input_shape">The input shape.</param>
        /// <param name="batch_input_shape">The batch input shape.</param>
        ///
        public Dense(int units, IActivationFunction activation = null, bool use_bias = true,
                     IWeightInitializer kernel_initializer     = null, IWeightInitializer bias_initializer = null,
                     IWeightRegularizer kernel_regularizer     = null, IWeightRegularizer bias_regularizer = null, IWeightRegularizer activity_regularizer = null,
                     IWeightConstraint kernel_constraint       = null, IWeightConstraint bias_constraint   = null,
                     int?input_dim = null, int?[] input_shape = null, int?[] batch_input_shape = null)
            : base(input_dim: input_dim, input_shape: input_shape, batch_input_shape: batch_input_shape)
        {
            // Python Keras equivalent:
            //if 'input_shape' not in kwargs and 'input_dim' in kwargs:
            //    kwargs['input_shape'] = (kwargs.pop('input_dim'),)

            this.units                = units;
            this.activation           = activation;
            this.use_bias             = use_bias;
            // Fall back to Keras' standard defaults when no initializers are supplied.
            this.kernel_initializer   = kernel_initializer ?? new GlorotUniform();
            this.bias_initializer     = bias_initializer ?? new Zeros();
            this.kernel_regularizer   = kernel_regularizer;
            this.bias_regularizer     = bias_regularizer;
            this.activity_regularizer = activity_regularizer;
            this.kernel_constraint    = kernel_constraint;
            this.bias_constraint      = bias_constraint;

            // Dense expects inputs of at least rank 2: (batch, features).
            this.input_spec.Add(new InputSpec(min_ndim: 2));
            this.supports_masking = true;
        }
Example #2
0
 /// <summary>
 /// Initializes a new instance of the <see cref="Conv2D"/> class, resolving the
 /// activation from its string name (via <c>Activation.Create</c>) and delegating
 /// to the <see cref="IActivationFunction"/>-based constructor.
 /// </summary>
 ///
 /// <param name="filters">Integer, the dimensionality of the output space (number of output filters in the convolution).</param>
 /// <param name="kernel_size">The dimensions of the 2D convolution window.</param>
 /// <param name="strides">The strides of the convolution.</param>
 /// <param name="padding">The padding scheme (valid or same).</param>
 /// <param name="data_format">Ordering of the input dimensions (channels_first or channels_last).</param>
 /// <param name="dilation_rate">The dilation rate for dilated convolution.</param>
 /// <param name="activation">Name of the activation function to use; resolved with <c>Activation.Create</c>.</param>
 /// <param name="use_bias">Whether the layer uses a bias vector.</param>
 /// <param name="kernel_initializer">Initializer for the kernel weights matrix.</param>
 /// <param name="bias_initializer">Initializer for the bias vector.</param>
 /// <param name="kernel_regularizer">Regularizer applied to the kernel weights matrix.</param>
 /// <param name="bias_regularizer">Regularizer applied to the bias vector.</param>
 /// <param name="activity_regularizer">Regularizer applied to the output of the layer.</param>
 /// <param name="kernel_constraint">Constraint applied to the kernel weights matrix.</param>
 /// <param name="bias_constraint">Constraint applied to the bias vector.</param>
 /// <param name="input_shape">The input shape.</param>
 ///
 public Conv2D(int filters,
               int[] kernel_size                       = null,
               int[] strides                           = null,
               PaddingType padding                     = PaddingType.Valid,
               DataFormatType?data_format              = null,
               int[] dilation_rate                     = null,
               string activation                       = null,
               bool use_bias                           = true,
               IWeightInitializer kernel_initializer   = null,
               IWeightInitializer bias_initializer     = null,
               IWeightRegularizer kernel_regularizer   = null,
               IWeightRegularizer bias_regularizer     = null,
               IWeightRegularizer activity_regularizer = null,
               IWeightConstraint kernel_constraint     = null,
               IWeightConstraint bias_constraint       = null,
               int?[] input_shape                      = null)
      : this(filters : filters,
             kernel_size : kernel_size,
             strides : strides,
             padding : padding,
             data_format : data_format,
             dilation_rate : dilation_rate,
             activation : Activation.Create(activation),
             use_bias : use_bias,
             kernel_initializer : kernel_initializer,
             bias_initializer : bias_initializer,
             kernel_regularizer : kernel_regularizer,
             bias_regularizer : bias_regularizer,
             activity_regularizer : activity_regularizer,
             kernel_constraint : kernel_constraint,
             bias_constraint : bias_constraint,
             input_shape : input_shape)
 {
 }
Example #3
0
 /// <summary>
 /// Initializes a new instance of the <see cref="Dense"/> class, resolving the
 /// activation from its string name (via <c>Activation.Create</c>) and delegating
 /// to the <see cref="IActivationFunction"/>-based constructor.
 /// </summary>
 ///
 /// <param name="units">Positive integer, dimensionality of the output space.</param>
 /// <param name="activation">Name of the activation function to use; resolved with <c>Activation.Create</c>.</param>
 /// <param name="use_bias">Whether the layer uses a bias vector.</param>
 /// <param name="kernel_initializer">Initializer for the kernel weights matrix.</param>
 /// <param name="bias_initializer">Initializer for the bias vector.</param>
 /// <param name="kernel_regularizer">Regularizer applied to the kernel weights matrix.</param>
 /// <param name="bias_regularizer">Regularizer applied to the bias vector.</param>
 /// <param name="activity_regularizer">Regularizer applied to the output of the layer.</param>
 /// <param name="kernel_constraint">Constraint applied to the kernel weights matrix.</param>
 /// <param name="bias_constraint">Constraint applied to the bias vector.</param>
 /// <param name="input_dim">The input dim.</param>
 /// <param name="input_shape">The input shape.</param>
 /// <param name="batch_input_shape">The batch input shape.</param>
 ///
 public Dense(int units, string activation, bool use_bias = true,
              IWeightInitializer kernel_initializer       = null, IWeightInitializer bias_initializer = null,
              IWeightRegularizer kernel_regularizer       = null, IWeightRegularizer bias_regularizer = null, IWeightRegularizer activity_regularizer = null,
              IWeightConstraint kernel_constraint         = null, IWeightConstraint bias_constraint   = null,
              int?input_dim = null, int?[] input_shape = null, int?[] batch_input_shape = null)
      : this(units : units, activation : Activation.Create(activation), use_bias : use_bias,
             kernel_initializer : kernel_initializer, bias_initializer : bias_initializer,
             kernel_regularizer : kernel_regularizer, bias_regularizer : bias_regularizer, activity_regularizer : activity_regularizer,
             kernel_constraint : kernel_constraint, bias_constraint : bias_constraint,
             input_dim : input_dim, input_shape : input_shape, batch_input_shape : batch_input_shape)
 {
 }
Example #4
0
 /// <summary>
 /// Initializes a new instance of the <see cref="Conv2D"/> class by delegating to
 /// the base convolution constructor with <c>rank: 2</c>, then constraining the
 /// expected input rank to 4 (batch, plus two spatial dimensions, plus channels).
 /// </summary>
 ///
 /// <param name="filters">Integer, the dimensionality of the output space (number of output filters in the convolution).</param>
 /// <param name="kernel_size">The dimensions of the 2D convolution window.</param>
 /// <param name="strides">The strides of the convolution.</param>
 /// <param name="padding">The padding scheme (valid or same).</param>
 /// <param name="data_format">Ordering of the input dimensions (channels_first or channels_last).</param>
 /// <param name="dilation_rate">The dilation rate for dilated convolution.</param>
 /// <param name="activation">The activation function to use. If null, no activation is applied.</param>
 /// <param name="use_bias">Whether the layer uses a bias vector.</param>
 /// <param name="kernel_initializer">Initializer for the kernel weights matrix.</param>
 /// <param name="bias_initializer">Initializer for the bias vector.</param>
 /// <param name="kernel_regularizer">Regularizer applied to the kernel weights matrix.</param>
 /// <param name="bias_regularizer">Regularizer applied to the bias vector.</param>
 /// <param name="activity_regularizer">Regularizer applied to the output of the layer.</param>
 /// <param name="kernel_constraint">Constraint applied to the kernel weights matrix.</param>
 /// <param name="bias_constraint">Constraint applied to the bias vector.</param>
 /// <param name="input_shape">The input shape.</param>
 ///
 public Conv2D(int filters,
               int[] kernel_size                       = null,
               int[] strides                           = null,
               PaddingType padding                     = PaddingType.Valid,
               DataFormatType?data_format              = null,
               int[] dilation_rate                     = null,
               IActivationFunction activation          = null,
               bool use_bias                           = true,
               IWeightInitializer kernel_initializer   = null,
               IWeightInitializer bias_initializer     = null,
               IWeightRegularizer kernel_regularizer   = null,
               IWeightRegularizer bias_regularizer     = null,
               IWeightRegularizer activity_regularizer = null,
               IWeightConstraint kernel_constraint     = null,
               IWeightConstraint bias_constraint       = null,
               int?[] input_shape                      = null)
      : base(rank: 2,
             filters: filters,
             kernel_size: kernel_size,
             strides: strides,
             padding: padding,
             data_format: data_format,
             dilation_rate: dilation_rate,
             activation: activation,
             use_bias: use_bias,
             kernel_initializer: kernel_initializer,
             bias_initializer: bias_initializer,
             kernel_regularizer: kernel_regularizer,
             bias_regularizer: bias_regularizer,
             activity_regularizer: activity_regularizer,
             kernel_constraint: kernel_constraint,
             bias_constraint: bias_constraint,
             input_shape: input_shape)
 {
     // 2D convolutions operate on rank-4 tensors; replace the base class' spec accordingly.
     this.input_spec = new List <InputSpec> {
         new InputSpec(ndim: 4)
     };
 }
Example #5
0
        /// <summary>
        /// Initializes a new instance of the <see cref="Dense"/> class.
        /// </summary>
        ///
        /// <param name="units">Positive integer, dimensionality of the output space.</param>
        /// <param name="activation">The activation function to use. If null, no activation is applied.</param>
        /// <param name="use_bias">Whether the layer uses a bias vector.</param>
        /// <param name="kernel_initializer">Initializer for the kernel weights matrix. Defaults to <see cref="GlorotUniform"/> when null.</param>
        /// <param name="bias_initializer">Initializer for the bias vector. Defaults to <see cref="Zeros"/> when null.</param>
        /// <param name="kernel_regularizer">Regularizer applied to the kernel weights matrix.</param>
        /// <param name="bias_regularizer">Regularizer applied to the bias vector.</param>
        /// <param name="activity_regularizer">Regularizer applied to the output of the layer.</param>
        /// <param name="kernel_constraint">Constraint applied to the kernel weights matrix.</param>
        /// <param name="bias_constraint">Constraint applied to the bias vector.</param>
        /// <param name="input_dim">The input dim.</param>
        /// <param name="input_shape">The input shape.</param>
        /// <param name="batch_input_shape">The batch input shape.</param>
        /// <param name="spectral_norm_iteration">Spectral-normalization iteration count; 0 appears to disable it (stored as-is — confirm semantics against its consumer).</param>
        ///
        public Dense(int units, IActivationFunction activation = null, bool use_bias = true,
                     IWeightInitializer kernel_initializer     = null, IWeightInitializer bias_initializer = null,
                     IWeightRegularizer kernel_regularizer     = null, IWeightRegularizer bias_regularizer = null, IWeightRegularizer activity_regularizer = null,
                     IWeightConstraint kernel_constraint       = null, IWeightConstraint bias_constraint   = null,
                     int?input_dim = null, int?[] input_shape = null, int?[] batch_input_shape = null, int spectral_norm_iteration = 0)
            : base(input_dim: input_dim, input_shape: input_shape, batch_input_shape: batch_input_shape)
        {
            // https://github.com/fchollet/keras/blob/f65a56fb65062c8d14d215c9f4b1015b97cc5bf3/keras/layers/core.py#L791
            //
            // Python Keras equivalent:
            //if 'input_shape' not in kwargs and 'input_dim' in kwargs:
            //    kwargs['input_shape'] = (kwargs.pop('input_dim'),)

            this.units                = units;
            this.activation           = activation;
            this.use_bias             = use_bias;
            // Fall back to Keras' standard defaults when no initializers are supplied.
            this.kernel_initializer   = kernel_initializer ?? new GlorotUniform();
            this.bias_initializer     = bias_initializer ?? new Zeros();
            this.kernel_regularizer   = kernel_regularizer;
            this.bias_regularizer     = bias_regularizer;
            this.activity_regularizer = activity_regularizer;
            this.kernel_constraint    = kernel_constraint;
            this.bias_constraint      = bias_constraint;

            // Dense expects inputs of at least rank 2: (batch, features).
            this.input_spec = new List <InputSpec> {
                new InputSpec(min_ndim: 2)
            };
            this.supports_masking        = true;
            this.spectral_norm_iteration = spectral_norm_iteration;
        }
Example #6
0
        /// <summary>
        /// Initializes a new instance of the <see cref="_Conv" /> class.
        /// </summary>
        /// <param name="rank">rank: An integer, the rank of the convolution, e.g. "2" for 2D convolution.</param>
        /// <param name="filters">Integer, the dimensionality of the output space (i.e.the number output of filters in the convolution).</param>
        /// <param name="kernel_size">An integer or tuple/list of n integers, specifying the dimensions of the convolution window.</param>
        /// <param name="strides">An integer or tuple/list of n integers, specifying the strides of the convolution. Specifying any stride value != 1 is incompatible with specifying any `dilation_rate` value != 1.</param>
        /// <param name="padding">One of `"valid"` or `"same"` (case-insensitive).</param>
        /// <param name="data_format">A string, one of `channels_last` (default) or `channels_first`. The ordering of the dimensions in the inputs.
        ///   `channels_last` corresponds to inputs with shape `(batch, ..., channels)` while `channels_first` corresponds to inputs with shape
        ///   `(batch, channels, ...)`. It defaults to the `image_data_format` value found in your Keras config file at `~/.keras/keras.json`.
        ///   If you never set it, then it will be "channels_last".</param>
        /// <param name="dilation_rate">An integer or tuple/list of n integers, specifying the dilation rate to use for dilated convolution. Currently, specifying any `dilation_rate` value != 1 is incompatible with specifying any `strides` value != 1.</param>
        /// <param name="activation">Activation function to use (see[activations](../activations.md)). If you don't specify anything, no activation is applied (ie. "linear" activation: `a(x) = x`).</param>
        /// <param name="use_bias">Boolean, whether the layer uses a bias vector.</param>
        /// <param name="kernel_initializer">Initializer for the `kernel` weights matrix (see[initializers](../initializers.md)).</param>
        /// <param name="bias_initializer">Initializer for the bias vector (see[initializers](../initializers.md)).</param>
        /// <param name="kernel_regularizer">Regularizer function applied to the `kernel` weights matrix (see[regularizer](../regularizers.md)).</param>
        /// <param name="bias_regularizer">Regularizer function applied to the bias vector (see[regularizer](../regularizers.md)).</param>
        /// <param name="activity_regularizer">Regularizer function applied to the output of the layer(its "activation"). (see[regularizer](../regularizers.md)).</param>
        /// <param name="kernel_constraint">Constraint function applied to the kernel matrix (see[constraints](../constraints.md)).</param>
        /// <param name="bias_constraint">Constraint function applied to the bias vector (see[constraints](../constraints.md)).</param>
        /// <param name="input_shape">The input shape.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="kernel_size"/> is null.</exception>
        /// <exception cref="ArgumentException">Thrown when the length of <paramref name="kernel_size"/>, <paramref name="strides"/>,
        ///   or <paramref name="dilation_rate"/> does not match <paramref name="rank"/>.</exception>
        public _Conv(int rank,
                     int filters,
                     int[] kernel_size,
                     int[] strides                           = null,
                     PaddingType padding                     = PaddingType.Valid,
                     DataFormatType?data_format              = null,
                     int[] dilation_rate                     = null,
                     IActivationFunction activation          = null,
                     bool use_bias                           = true,
                     IWeightInitializer kernel_initializer   = null,
                     IWeightInitializer bias_initializer     = null,
                     IWeightRegularizer kernel_regularizer   = null,
                     IWeightRegularizer bias_regularizer     = null,
                     IWeightRegularizer activity_regularizer = null,
                     IWeightConstraint kernel_constraint     = null,
                     IWeightConstraint bias_constraint       = null,
                     int?[] input_shape                      = null)
            : base(input_shape: input_shape)
        {
            // kernel_size is required: fail with a clear exception instead of an NRE below.
            if (kernel_size == null)
            {
                throw new ArgumentNullException(nameof(kernel_size));
            }

            // Fall back to Keras' standard defaults when arguments are omitted.
            kernel_initializer = kernel_initializer ?? new GlorotUniform();
            bias_initializer   = bias_initializer ?? new Zeros();
            strides            = strides ?? Vector.Create <int>(size: rank, value: 1);
            dilation_rate      = dilation_rate ?? Vector.Create <int>(size: rank, value: 1);
            data_format        = data_format ?? K.image_data_format();

            // Each per-dimension argument must supply exactly one value per convolution dimension.
            if (kernel_size.Length != rank)
            {
                throw new ArgumentException($"kernel_size must contain exactly {rank} element(s).", nameof(kernel_size));
            }

            if (strides.Length != rank)
            {
                throw new ArgumentException($"strides must contain exactly {rank} element(s).", nameof(strides));
            }

            if (dilation_rate.Length != rank)
            {
                throw new ArgumentException($"dilation_rate must contain exactly {rank} element(s).", nameof(dilation_rate));
            }

            // https://github.com/fchollet/keras/blob/f65a56fb65062c8d14d215c9f4b1015b97cc5bf3/keras/layers/convolutional.py#L101

            this.rank                 = rank;
            this.filters              = filters;
            this.kernel_size          = kernel_size;
            this.strides              = strides;
            this.padding              = padding;
            this.data_format          = data_format;
            this.dilation_rate        = dilation_rate;
            this.activation           = activation;
            this.use_bias             = use_bias;
            this.kernel_initializer   = kernel_initializer;
            this.bias_initializer     = bias_initializer;
            this.kernel_regularizer   = kernel_regularizer;
            this.bias_regularizer     = bias_regularizer;
            this.activity_regularizer = activity_regularizer;
            this.kernel_constraint    = kernel_constraint;
            this.bias_constraint      = bias_constraint;
            // Convolution inputs are rank + 2 tensors: batch, spatial dims, channels.
            this.input_spec           = new List <InputSpec> {
                new InputSpec(ndim: this.rank + 2)
            };
        }