Exemplo n.º 1
0
 /// <summary>
 ///     Initializes a new instance of the <see cref="Dense" /> class.
 /// </summary>
 /// <param name="dim">Positive integer, dimensionality of the output space.</param>
 /// <param name="activation">
 ///     Activation function to use. When null, no activation is applied (i.e. "linear"
 ///     activation: a(x) = x). <see cref="SiaNet.Common.OptActivations" />
 /// </param>
 /// <param name="useBias">Boolean, whether the layer uses a bias vector.</param>
 /// <param name="weightInitializer">Initializer for the kernel weights matrix; defaults to <see cref="Xavier" /> when null.</param>
 /// <param name="biasInitializer">Initializer for the bias vector; defaults to <see cref="Zeros" /> when null.</param>
 public Dense(
     int dim,
     ActivationBase activation = null,
     bool useBias = false,
     InitializerBase weightInitializer = null,
     InitializerBase biasInitializer   = null)
 {
     // Initializers fall back to the conventional defaults when not supplied.
     WeightInitializer = weightInitializer ?? new Xavier();
     BiasInitializer   = biasInitializer ?? new Zeros();

     Dim        = dim;
     Activation = activation;
     UseBias    = useBias;
 }
Exemplo n.º 2
0
        /// <summary>
        ///     Initializes a new instance of the <see cref="CudaRNN" /> class.
        /// </summary>
        /// <param name="hiddenSize">Size of the hidden layer.</param>
        /// <param name="numLayers">The number of layers.</param>
        /// <param name="bidirectional">Whether the RNN is bidirectional.</param>
        /// <param name="activation">
        ///     Activation function to use; defaults to <see cref="ReLU" /> when null.
        ///     Only ReLU and Tanh are supported.
        /// </param>
        /// <param name="weightInitializer">The weight initializer; defaults to <see cref="Xavier" /> when null.</param>
        /// <exception cref="NotSupportedException">Thrown when the activation is neither ReLU nor Tanh.</exception>
        public CudaRNN(
            uint hiddenSize,
            uint numLayers,
            bool bidirectional                = false,
            ActivationBase activation         = null,
            InitializerBase weightInitializer = null)
        {
            // Resolve the activation first so validation covers the default too.
            var resolvedActivation = activation ?? new ReLU();

            if (!(resolvedActivation is ReLU || resolvedActivation is Tanh))
            {
                throw new NotSupportedException("Supported activation for RNN is ReLU and Tanh");
            }

            Activation        = resolvedActivation;
            LayerSize         = hiddenSize;
            Layers            = numLayers;
            BiDirectional     = bidirectional;
            WeightInitializer = weightInitializer ?? new Xavier();
        }
Exemplo n.º 3
0
 /// <summary>
 ///     Initializes a new instance of the <see cref="LSTM" /> class.
 /// </summary>
 /// <param name="dim">Positive integer, dimensionality of the output space.</param>
 /// <param name="cellDim">Optional dimensionality of the cell state; when null, no explicit cell dimension is set.</param>
 /// <param name="activation">
 ///     Activation function to use. Defaults to <see cref="Tanh" /> when null.
 ///     <see cref="SiaNet.Common.OptActivations" />
 /// </param>
 /// <param name="recurrentActivation">
 ///     Activation function for the recurrent step. Defaults to <see cref="Sigmoid" /> when null.
 /// </param>
 /// <param name="weightInitializer">
 ///     Initializer for the kernel weights matrix; defaults to <see cref="GlorotUniform" /> when null.
 ///     <see cref="SiaNet.Common.OptInitializers" />
 /// </param>
 /// <param name="recurrentInitializer">
 ///     Initializer for the recurrent weights matrix; defaults to <see cref="GlorotUniform" /> when null.
 /// </param>
 /// <param name="useBias">Boolean, whether the layer uses a bias vector.</param>
 /// <param name="biasInitializer">
 ///     Initializer for the bias vector; defaults to <see cref="Zeros" /> when null.
 ///     <see cref="SiaNet.Common.OptInitializers" />
 /// </param>
 /// <param name="returnSequence">Boolean, whether to return the output sequence (stored on <c>ReturnSequence</c>).</param>
 public LSTM(
     int dim,
     int? cellDim = null,
     ActivationBase activation            = null,
     ActivationBase recurrentActivation   = null,
     InitializerBase weightInitializer    = null,
     InitializerBase recurrentInitializer = null,
     bool useBias = true,
     InitializerBase biasInitializer = null,
     bool returnSequence             = false)
 {
     Dim                  = dim;
     CellDim              = cellDim;
     // Tanh/Sigmoid are the conventional LSTM cell and gate activations.
     Activation           = activation ?? new Tanh();
     RecurrentActivation  = recurrentActivation ?? new Sigmoid();
     UseBias              = useBias;
     ReturnSequence       = returnSequence;
     WeightInitializer    = weightInitializer ?? new GlorotUniform();
     RecurrentInitializer = recurrentInitializer ?? new GlorotUniform();
     BiasInitializer      = biasInitializer ?? new Zeros();
 }
Exemplo n.º 4
0
 /// <summary>
 ///     Initializes a new instance of the <see cref="Conv1D" /> class.
 /// </summary>
 /// <param name="channels">Integer, the dimensionality of the output space.</param>
 /// <param name="kernalSize">An integer specifying the length of the 1D convolution window.</param>
 /// <param name="strides">An integer specifying the stride length of the convolution.</param>
 /// <param name="padding">
 ///     Boolean, if true results in padding the input such that the output has the same length as the
 ///     original input.
 /// </param>
 /// <param name="dialation">
 ///     An integer specifying the dilation rate to use for dilated convolution. Currently, specifying
 ///     any dilation_rate value != 1 is incompatible with specifying any strides value != 1.
 /// </param>
 /// <param name="activation">
 ///     Activation function to use. If you don't specify anything, no activation is applied (ie.
 ///     "linear" activation: a(x) = x). <see cref="SiaNet.Common.OptActivations" />
 /// </param>
 /// <param name="useBias">Boolean, whether the layer uses a bias vector.</param>
 /// <param name="weightInitializer">
 ///     Initializer for the kernel weights matrix; defaults to <see cref="Xavier" /> when null.
 ///     <see cref="SiaNet.Common.OptInitializers" />
 /// </param>
 /// <param name="biasInitializer">
 ///     Initializer for the bias vector; defaults to <see cref="Zeros" /> when null.
 ///     <see cref="SiaNet.Common.OptInitializers" />
 /// </param>
 public Conv1D(
     int channels,
     int kernalSize,
     int strides                       = 1,
     bool padding                      = true,
     int dialation                     = 1,
     ActivationBase activation         = null,
     bool useBias                      = false,
     InitializerBase weightInitializer = null,
     InitializerBase biasInitializer   = null)
 {
     WeightInitializer = weightInitializer ?? new Xavier();
     BiasInitializer   = biasInitializer ?? new Zeros();
     Channels          = channels;
     KernalSize        = kernalSize;
     Padding           = padding;
     Dialation         = dialation;
     Activation        = activation;
     UseBias           = useBias;
     Strides           = strides;
 }
Exemplo n.º 5
0
 /// <summary>
 ///     Initializes a new instance of the <see cref="Conv2D" /> class.
 /// </summary>
 /// <param name="channels">Integer, the dimensionality of the output space.</param>
 /// <param name="kernalSize">
 ///     A tuple of 2 integers, specifying the width and height of the 2D convolution window.
 /// </param>
 /// <param name="strides">
 ///     A tuple of 2 integers, specifying the strides of the convolution along the width and height;
 ///     defaults to (1, 1) when null. Specifying any stride value != 1 is incompatible with specifying
 ///     any dilation_rate value != 1.
 /// </param>
 /// <param name="padding">
 ///     Boolean, if true results in padding the input such that the output has the same length as the
 ///     original input.
 /// </param>
 /// <param name="dialation">
 ///     A tuple of 2 integers, specifying the dilation rate to use for dilated convolution; defaults
 ///     to (1, 1) when null. Specifying any dilation_rate value != 1 is incompatible with specifying
 ///     any stride value != 1.
 /// </param>
 /// <param name="activation">
 ///     Activation function to use. When null, no activation is applied (i.e. "linear"
 ///     activation: a(x) = x). <see cref="SiaNet.Common.OptActivations" />
 /// </param>
 /// <param name="useBias">Boolean, whether the layer uses a bias vector.</param>
 /// <param name="weightInitializer">
 ///     Initializer for the kernel weights matrix; defaults to <see cref="Xavier" /> when null.
 ///     <see cref="SiaNet.Common.OptInitializers" />
 /// </param>
 /// <param name="biasInitializer">
 ///     Initializer for the bias vector; defaults to <see cref="Zeros" /> when null.
 ///     <see cref="SiaNet.Common.OptInitializers" />
 /// </param>
 public Conv2D(
     int channels,
     Tuple <int, int> kernalSize,
     Tuple <int, int> strides          = null,
     bool padding                      = true,
     Tuple <int, int> dialation        = null,
     ActivationBase activation         = null,
     bool useBias                      = false,
     InitializerBase weightInitializer = null,
     InitializerBase biasInitializer   = null)
 {
     // Strides and dilation both fall back to a unit (1, 1) window.
     var effectiveStrides   = strides ?? Tuple.Create(1, 1);
     var effectiveDialation = dialation ?? Tuple.Create(1, 1);

     Channels          = channels;
     KernalSize        = kernalSize;
     Strides           = effectiveStrides;
     Dialation         = effectiveDialation;
     Padding           = padding;
     UseBias           = useBias;
     Activation        = activation;
     WeightInitializer = weightInitializer ?? new Xavier();
     BiasInitializer   = biasInitializer ?? new Zeros();
 }