Example #1
 protected override void build(TensorShape input_shape)
 {
     // Create the trainable embedding matrix of shape (input_dim, output_dim).
     embeddings = add_weight(shape: new int[] { input_dim, output_dim },
                             initializer: embeddings_initializer,
                             name: "embeddings");
     // Mark the layer as built so build() is not run again.
     built = true;
 }
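For context, a minimal sketch (not from the source) of how the weight built above is typically consumed in the layer's forward pass; the `call` override here is hypothetical, while `tf.nn.embedding_lookup` is the TensorFlow.NET binding used in its word2vec examples:

 // Sketch only: each integer id in `inputs` selects one row
 // (of length output_dim) from the embedding matrix built above.
 protected Tensor call(Tensor inputs)
 {
     return tf.nn.embedding_lookup(embeddings, inputs);
 }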
Example #2
        /// <summary>
        /// Helper function for creating a slot variable.
        /// </summary>
        /// <param name="primary">The primary variable the slot is attached to.</param>
        /// <param name="val">Initializer for the slot variable.</param>
        /// <param name="scope">Variable scope name for the slot.</param>
        /// <param name="validate_shape">Whether to validate the shape of the initial value.</param>
        /// <param name="shape">Shape of the slot variable.</param>
        /// <param name="dtype">Data type of the slot variable.</param>
        /// <returns>The newly created slot variable.</returns>
        private RefVariable _create_slot_var(VariableV1 primary, IInitializer val, string scope, bool validate_shape,
                                             TensorShape shape, TF_DataType dtype)
        {
            // Slots for resource variables must themselves be resource-based.
            bool use_resource = primary is ResourceVariable
                || resource_variable_ops.is_resource_variable(primary);

            // Slot variables are never trained directly; the optimizer updates them.
            var slot = tf.get_variable(
                scope,
                initializer: val,
                trainable: false,
                use_resource: use_resource,
                shape: shape,
                dtype: dtype,
                validate_shape: validate_shape);

            return slot;
        }
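For context, a hedged sketch (not from the source) of how an optimizer-side caller could wrap this helper to create a zero-filled slot; `_zeros_slot` is hypothetical, and the `shape`/`dtype` accessors on the primary variable are assumptions:

        // Sketch only: a zero-initialized slot (e.g. a momentum accumulator)
        // mirroring the primary variable's shape and dtype.
        private RefVariable _zeros_slot(VariableV1 primary, string slot_name)
        {
            return _create_slot_var(primary,
                                    tf.zeros_initializer,   // assumed IInitializer
                                    slot_name,
                                    validate_shape: true,
                                    shape: primary.shape,   // assumed accessor
                                    dtype: primary.dtype);  // assumed accessor
        }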
Example #3
        protected override void build(TensorShape input_shape)
        {
            var ndims = input_shape.ndim;

            // Convert negative axis values to their positive equivalents.
            foreach (var (idx, x) in enumerate(axis))
            {
                if (x < 0)
                {
                    axis[idx] = ndims + x;
                }
            }

            // The fused implementation only handles channels-last input here.
            if (fused)
            {
                if (Enumerable.SequenceEqual(axis, new int[] { 3 }))
                {
                    _data_format = "NHWC";
                }
            }

            var param_dtype = _dtype == TF_DataType.DtInvalid ? TF_DataType.TF_FLOAT : _dtype;
            // gamma/beta and the moving statistics all share the shape of the
            // normalized axis (typically the channel dimension).
            var param_shape = new int[] { input_shape.dims[axis[0]] };

            if (scale)
            {
                gamma = add_weight("gamma",
                                   param_shape,
                                   dtype: param_dtype,
                                   initializer: gamma_initializer,
                                   trainable: true);
            }
            else
            {
                throw new NotImplementedException("add_weight gamma");
            }

            if (center)
            {
                beta = add_weight("beta",
                                  param_shape,
                                  dtype: param_dtype,
                                  initializer: beta_initializer,
                                  trainable: true);
            }
            else
            {
                throw new NotImplementedException("add_weight beta");
            }

            // Moving statistics are synchronized on read and never trained directly.
            moving_mean = (RefVariable)add_weight("moving_mean",
                                                  param_shape,
                                                  dtype: param_dtype,
                                                  initializer: moving_mean_initializer,
                                                  synchronization: VariableSynchronization.OnRead,
                                                  trainable: false,
                                                  aggregation: VariableAggregation.Mean);

            moving_variance = (RefVariable)add_weight("moving_variance",
                                                      shape: param_shape,
                                                      dtype: param_dtype,
                                                      initializer: moving_variance_initializer,
                                                      synchronization: VariableSynchronization.OnRead,
                                                      trainable: false,
                                                      aggregation: VariableAggregation.Mean);

            if (renorm)
            {
                throw new NotImplementedException("build when renorm is true");
            }

            built = true;
        }
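A worked illustration of the axis and shape handling above, with hypothetical values:

        // Sketch only: a 4-D NHWC input with 64 channels, normalizing over
        // the last axis.
        var input_shape = new TensorShape(-1, 28, 28, 64);
        var axis = new int[] { -1 };
        // build() rewrites axis to { 3 } (ndims + -1), so param_shape is
        // { 64 }: one gamma/beta/moving statistic per channel, and the
        // fused path selects the "NHWC" data format.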
Example #4
 public static void clone_and_build_model(Model model, Tensor[] input_tensors = null, Tensor[] target_tensors = null, object custom_objects = null,
                                          bool compile_clone = true, bool in_place_reset = false, VariableV1 optimizer_iterations = null, Hashtable optimizer_config = null)
     => throw new NotImplementedException();
Example #5
 private void _handle_weight_regularization(string name, VariableV1 variable, Regularizer regularizer) => throw new NotImplementedException();
Example #6
 protected VariableV1 _track_checkpointable(VariableV1 checkpointable, string name, bool overwrite = false)
 {
     // Pass-through stub: checkpoint dependency tracking is not implemented here.
     return checkpointable;
 }
Example #7
 /// <summary>
 /// Pop and load any deferred checkpoint restores into `trackable`.
 /// </summary>
 /// <param name="name">Name under which the dependency was registered.</param>
 /// <param name="trackable">The object to receive any deferred restores.</param>
 protected void _handle_deferred_dependencies(string name, VariableV1 trackable)
 {
     _maybe_initialize_trackable();
     // TODO
 }