Code Example #1
        public override Graph BuildGraph()
        {
            var graph = new Graph().as_default();

            X = tf.placeholder(tf.float32, (-1, timesteps, num_input));
            Y = tf.placeholder(tf.float32, (-1, num_classes));

            // Hidden layer weights => 2*n_hidden because of forward + backward cells
            var weights = tf.Variable(tf.random_normal((2 * num_hidden, num_classes)));
            var biases  = tf.Variable(tf.random_normal(num_classes));

            // Unstack to get a list of 'timesteps' tensors of shape (batch_size, num_input)
            var x = tf.unstack(X, timesteps, 1);

            // Define lstm cells with tensorflow
            // Forward direction cell
            var lstm_fw_cell = new BasicLstmCell(num_hidden, forget_bias: 1.0f);
            // Backward direction cell
            var lstm_bw_cell = new BasicLstmCell(num_hidden, forget_bias: 1.0f);

            // Get lstm cell output
            var(outputs, _, _) = rnn.static_bidirectional_rnn(lstm_fw_cell, lstm_bw_cell, x, dtype: tf.float32);

            // Linear activation, using rnn inner loop last output
            var logits = tf.matmul(outputs.Last(), weights) + biases;

            prediction = tf.nn.softmax(logits);

            // Define loss and optimizer
            loss_op = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(
                                         logits: logits, labels: Y));
            var optimizer = tf.train.GradientDescentOptimizer(learning_rate: learning_rate);

            train_op = optimizer.minimize(loss_op);

            // Evaluate model: fraction of samples whose predicted class matches the label
            var correct_pred = tf.equal(tf.argmax(prediction, 1), tf.argmax(Y, 1));

            accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32));

            return graph;
        }
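
BuildGraph() only assembles the computation graph; training happens elsewhere in the project. The sketch below is a hedged illustration of how a training loop is typically written against such a graph in TensorFlow.NET. The batch provider (mnist.GetNextBatch), the hyper-parameter fields (training_steps, batch_size, display_step) and the tuple-feed overloads of sess.run are assumptions modelled on the TensorFlow.NET samples, not code from this project.

        // Hypothetical training loop (illustration only). Assumes the fields created in
        // BuildGraph() -- X, Y, train_op, loss_op, accuracy -- plus an MNIST-style batch source.
        public void Train(Graph graph)
        {
            using (var sess = tf.Session(graph))
            {
                sess.run(tf.global_variables_initializer());

                for (int step = 1; step <= training_steps; step++)
                {
                    // Hypothetical batch provider; reshape to (batch_size, timesteps, num_input).
                    var (batch_x, batch_y) = mnist.GetNextBatch(batch_size);
                    batch_x = batch_x.reshape(batch_size, timesteps, num_input);

                    // Feed the placeholders and run one optimization step.
                    sess.run(train_op, (X, batch_x), (Y, batch_y));

                    if (step % display_step == 0)
                    {
                        var (loss, acc) = sess.run((loss_op, accuracy), (X, batch_x), (Y, batch_y));
                        Console.WriteLine($"step {step}: loss={loss}, accuracy={acc}");
                    }
                }
            }
        }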
Code Example #2
File: CRNN.cs Project: mlnethub/TfConsoleApp
        Tensor _rnn(Tensor inputs, Tensor seqLength)
        {
            Tensor interOutputs = null;

            // First bidirectional layer: run forward and backward LSTMs over the input and
            // concatenate their per-step outputs along the feature axis.
            tf_with(tf.variable_scope(name: null, default_name: "bidirectional-rnn-1"), bw_scope =>
            {
                var lstmFwCell1 = new BasicLstmCell(256);
                var lstmBwCell1 = new BasicLstmCell(256);
                var interOutput = rnn.static_bidirectional_rnn(lstmFwCell1, lstmBwCell1, new[] { inputs },
                                                               sequence_length: seqLength, dtype: TF_DataType.TF_FLOAT);
                interOutputs    = tf.concat(interOutput.Item1, 2);
            });

            Tensor outputs = null;

            // Second bidirectional layer stacked on top of the first.
            tf_with(tf.variable_scope(name: null, default_name: "bidirectional-rnn-2"), bw_scope =>
            {
                var lstmFwCell2 = new BasicLstmCell(256);
                var lstmBwCell2 = new BasicLstmCell(256);
                var opts        = rnn.static_bidirectional_rnn(lstmFwCell2, lstmBwCell2, new[] { interOutputs },
                                                               sequence_length: seqLength, dtype: TF_DataType.TF_FLOAT);
                outputs = tf.concat(opts.Item1, 2);
            });

            return outputs;
        }
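
For reference, here is a hedged sketch of one way _rnn might be invoked from the rest of the CRNN model. The tensor name (convFeatures), the dimensions (batchSize, maxTime) and the constant per-example lengths are assumptions for illustration; they are not taken from CRNN.cs.

        // Hypothetical call site (illustration only): convFeatures stands in for the CNN
        // feature map, and every example is assumed to use the full width as its length.
        Tensor BuildSequenceLayers(Tensor convFeatures, int batchSize, int maxTime)
        {
            // One sequence length per batch element; a real model may compute these from the data.
            var lengths = new int[batchSize];
            for (int i = 0; i < batchSize; i++)
                lengths[i] = maxTime;

            var seqLength = tf.constant(lengths);
            return _rnn(convFeatures, seqLength);
        }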
Code Example #3
        /// <summary>
        /// Creates a bidirectional recurrent neural network.
        /// </summary>
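        /// <returns>
        /// A tuple (outputs, output_state_fw, output_state_bw): one output tensor per time step
        /// with the forward and backward outputs concatenated depth-wise (hence the 2 * num_hidden
        /// weight shape in the first example), plus the final state of each direction.
        /// </returns>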
        public static (Tensor[], LSTMStateTuple, LSTMStateTuple) static_bidirectional_rnn(BasicLstmCell cell_fw,
                                                                                          BasicLstmCell cell_bw,
                                                                                          Tensor[] inputs,
                                                                                          Tensor initial_state_fw = null,
                                                                                          Tensor initial_state_bw = null,
                                                                                          TF_DataType dtype       = TF_DataType.DtInvalid,
                                                                                          Tensor sequence_length  = null,
                                                                                          string scope            = null)
        {
            if (inputs == null || inputs.Length == 0)
            {
                throw new ValueError("inputs must not be empty");
            }

            Tensor[]       output_fw       = null;
            Tensor[]       output_bw       = null;
            LSTMStateTuple output_state_fw = null;
            LSTMStateTuple output_state_bw = null;

            tf_with(tf.variable_scope(scope ?? "bidirectional_rnn"), delegate
            {
                // Forward direction
                tf_with(tf.variable_scope("fw"), fw_scope =>
                {
                    (output_fw, output_state_fw) = static_rnn(
                        cell_fw,
                        inputs,
                        initial_state_fw,
                        dtype,
                        sequence_length,
                        scope: fw_scope);
                });

                // Backward direction: the same static RNN run over the time-reversed inputs
                tf_with(tf.variable_scope("bw"), bw_scope =>
                {
                    var reversed_inputs          = _reverse_seq(inputs, sequence_length);
                    (output_bw, output_state_bw) = static_rnn(
                        cell_bw,
                        reversed_inputs,
                        initial_state_bw,
                        dtype,
                        sequence_length,
                        scope: bw_scope);
                });
            });