Example #1
        public override ConfigDict GetConfig()
        {
            var config = new ConfigDict {
                {
                    "alpha_initializer",
                    Initializer.Serialize(this.alpha_initializer)
                },
                {
                    "alpha_regularizer",
                    Regularizer.Serialize(this.alpha_regularizer)
                },
                {
                    "alpha_constraint",
                    Constraint.Serialize(this.alpha_constraint)
                },
                {
                    "shared_axes",
                    this.shared_axes
                }
            };
            var base_config = base.GetConfig();

            base_config.Update(config);
            return(base_config);
        }
Example #2
 public Dense(int units,
              string activation = "",
              bool use_bias     = true,
              Initializer kernel_initializer   = null,
              Initializer bias_initializer     = null,
              Regularizer kernel_regularizer   = null,
              Regularizer bias_regularizer     = null,
              Regularizer activity_regularizer = null,
              Constraint kernel_constraint     = null,
              Constraint bias_constraint       = null,
              bool sparse_weight = false,
              Shape input_shape  = null,
              int? input_dim     = null)
     : base(input_dim.HasValue ? new Shape(input_dim.Value) : input_shape)
 {
     this.units                = units;
     this.activation           = new Activation(activation, null);
     this.use_bias             = use_bias;
     this.kernel_initializer   = kernel_initializer ?? new GlorotUniform(null);
     this.bias_initializer     = bias_initializer ?? new Zeros();
     this.kernel_regularizer   = kernel_regularizer;
     this.bias_regularizer     = bias_regularizer;
     this.activity_regularizer = activity_regularizer;
     this.kernel_constraint    = kernel_constraint;
     this.bias_constraint      = bias_constraint;
     this.input_spec           = new InputSpec[] { new InputSpec(min_ndim: 2) };
     this.supports_masking     = true;
     this.sparse_weight        = sparse_weight;
 }
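
For orientation, a minimal construction sketch using the signature above; the argument values are illustrative, and "relu" assumes a name recognized by the Activation(activation, null) lookup:

 var dense = new Dense(units: 64,
                       activation: "relu",
                       input_dim: 100);
 // input_dim takes precedence here: the base(...) call builds new Shape(100).
 // kernel_initializer defaults to GlorotUniform and bias_initializer to Zeros.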
Example #3
        public override ConfigDict GetConfig()
        {
            var config = new ConfigDict {
                {
                    "units",
                    this.units
                },
                {
                    "activation",
                    Activations.Serialize(this.activation)
                },
                {
                    "use_bias",
                    this.use_bias
                },
                {
                    "kernel_initializer",
                    Initializer.Serialize(this.kernel_initializer)
                },
                {
                    "bias_initializer",
                    Initializer.Serialize(this.bias_initializer)
                },
                {
                    "kernel_regularizer",
                    Regularizer.Serialize(this.kernel_regularizer)
                },
                {
                    "bias_regularizer",
                    Regularizer.Serialize(this.bias_regularizer)
                },
                {
                    "activity_regularizer",
                    Regularizer.Serialize(this.activity_regularizer)
                },
                {
                    "kernel_constraint",
                    Constraint.Serialize(this.kernel_constraint)
                },
                {
                    "bias_constraint",
                    Constraint.Serialize(this.bias_constraint)
                }
            };
            var base_config = base.GetConfig();

            base_config.Update(config);
            return(base_config);
        }
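
A short sketch of how the serialized config is typically consumed; the constructor arguments are illustrative:

        var layer  = new Dense(32, activation: "tanh");
        var config = layer.GetConfig();
        // config holds this layer's "units", "activation", initializer,
        // regularizer, and constraint entries, merged on top of whatever
        // base.GetConfig() contributed.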
Example #4
        /// <summary>
        ///   Compute the error cost of the given Theta parameter for the training and label sets
        /// </summary>
        /// <param name="theta">Learning Theta parameters</param>
        /// <returns>The scalar cost value.</returns>
        public override double ComputeCost(Vector theta)
        {
            var m = X.Rows;

            var s = (X * theta).ToVector();

            var j = 1.0 / (2.0 * m) * ((s - Y) ^ 2.0).Sum();

            if (Lambda != 0)
            {
                j = Regularizer.Regularize(j, theta, m, Lambda);
            }

            return(j);
        }
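
In LaTeX notation, with s = X\theta as in the code, the unregularized cost computed here is

    J(\theta) = \frac{1}{2m}\sum_{i=1}^{m}\bigl((X\theta)_i - y_i\bigr)^2

and when Lambda is nonzero, Regularizer.Regularize adds the penalty term on top of this value.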
Example #5
        /// <summary>
        /// Compute the error cost of the given Theta parameter for the training and label sets
        /// </summary>
        /// <param name="theta">Learning Theta parameters</param>
        /// <returns>The scalar cost value.</returns>
        public override double ComputeCost(Vector theta)
        {
            int m = X.Rows;

            Vector s = (X * theta).ToVector();

            s = LogisticFunction.Compute(s);

            Vector slog = s.Calc(v => System.Math.Log(1.0 - v));

            double j = (-1.0 / m) * (Y.Dot(s.Log()) + ((1.0 - Y).Dot(slog)));

            if (Lambda != 0)
            {
                j = Regularizer.Regularize(j, theta, m, Lambda);
            }

            return(j);
        }
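
In LaTeX notation, with \sigma the logistic function applied by LogisticFunction.Compute, the unregularized cost is the standard log-loss

    J(\theta) = -\frac{1}{m}\sum_{i=1}^{m}\Bigl[y_i\log\sigma\bigl((X\theta)_i\bigr) + (1-y_i)\log\bigl(1-\sigma((X\theta)_i)\bigr)\Bigr]

which matches Y.Dot(s.Log()) plus (1 - Y).Dot(slog), scaled by -1/m.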
Example #6
        /// <summary>
        /// Compute the error gradient of the given Theta parameter for the training and label sets
        /// </summary>
        /// <param name="theta">Learning Theta parameters</param>
        /// <returns>The gradient vector with respect to theta.</returns>
        public override Vector ComputeGradient(Vector theta)
        {
            int    m        = X.Rows;
            Vector gradient = Vector.Zeros(theta.Length);

            Vector s = (X * theta).ToVector();

            for (int i = 0; i < theta.Length; i++)
            {
                gradient[i] = 1.0 / m * ((s - Y) * X[i, VectorType.Col]).Sum();
            }

            if (Lambda != 0)
            {
                gradient = Regularizer.Regularize(gradient, theta, m, Lambda);
            }

            return(gradient);
        }
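
Each loop iteration fills one component of the least-squares gradient; in LaTeX notation,

    \frac{\partial J}{\partial \theta_i} = \frac{1}{m}\sum_{k=1}^{m}\bigl((X\theta)_k - y_k\bigr)X_{ki}

where X[i, VectorType.Col] selects the i-th feature column of X.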
Example #7
        public PReLU(Initializer alpha_initializer = null, Regularizer alpha_regularizer = null, Constraint alpha_constraint = null, Shape shared_axes = null)
        {
            this.supports_masking  = true;
            this.alpha_initializer = alpha_initializer ?? new Zeros();
            this.alpha_regularizer = alpha_regularizer;
            this.alpha_constraint  = alpha_constraint;
            this.shared_axes       = shared_axes;
        }
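
A minimal construction sketch; the shared_axes value is illustrative and assumes Shape accepts an explicit axis index as in the Dense constructor above:

        var prelu  = new PReLU();                          // alpha initialized with Zeros
        var shared = new PReLU(shared_axes: new Shape(1)); // share alpha across axis 1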
Example #8
        /// <summary>
        ///   Compute the error gradient of the given Theta parameter for the training and label sets
        /// </summary>
        /// <param name="theta">Learning Theta parameters</param>
        /// <returns>The gradient vector with respect to theta.</returns>
        public override Vector ComputeGradient(Vector theta)
        {
            var m        = X.Rows;
            var gradient = Vector.Zeros(theta.Length);

            var s = (X * theta).ToVector();

            s = LogisticFunction.Compute(s);

            for (var i = 0; i < theta.Length; i++)
            {
                gradient[i] = 1.0 / m * ((s - Y) * X[i, VectorType.Col]).Sum();
            }

            if (Lambda != 0)
            {
                gradient = Regularizer.Regularize(gradient, theta, m, Lambda);
            }

            return(gradient);
        }
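
The only difference from the linear-regression gradient above is that the hypothesis is passed through the sigmoid first:

    \frac{\partial J}{\partial \theta_i} = \frac{1}{m}\sum_{k=1}^{m}\bigl(\sigma((X\theta)_k) - y_k\bigr)X_{ki}

The functional form is unchanged because the derivatives of the log-loss and the sigmoid cancel.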
Example #9
 public virtual KerasSymbol AddWeight(string name, Shape shape, DType dtype = null, Initializer initializer = null, Regularizer regularizer = null, bool trainable = true, Constraint constraint = null, bool sparse_weight = false)
 {
     throw new NotImplementedException();
 }
Example #10
 private void _handle_weight_regularization(string name, VariableV1 variable, Regularizer regularizer) => throw new NotImplementedException();
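
The port stubs this method out; in the upstream Keras base layer the corresponding hook registers the weight's regularization penalty as a deferred layer loss. A hypothetical sketch of that behavior, assuming an AddLoss hook and a regularizer Call method (both names are assumptions, not part of this source):

 private void _handle_weight_regularization(string name, VariableV1 variable, Regularizer regularizer)
 {
     if (regularizer == null)
         return;
     // Hypothetical: defer the penalty so it is re-evaluated on each training step.
     AddLoss(() => regularizer.Call(variable));
 }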
Example #11
 public void add_weight(string name                     = null, TensorShape shape  = null, string dtype = null, Initializer initializer = null,
                        Regularizer regularizer         = null, bool? trainable    = null, ConstraintBase constraint = null,
                        dynamic partitioner             = null, bool? use_resource = null, VariableSynchronization synchronization = VariableSynchronization.Auto,
                        VariableAggregation aggregation = VariableAggregation.None, Dictionary<string, object> kwargs = null) => throw new NotImplementedException();