Code example #1
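Adds and divides two constant tensors inside a session; it also enables gpu_options.allow_growth so TensorFlow does not reserve all GPU memory up front, and writes the graph out with a FileWriter.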
        static void Main()
        {
            GradientLog.OutputWriter = Console.Out;
            GradientSetup.UseEnvironmentFromVariable();

            Tensor a = tf.constant(5.0, name: "a");
            Tensor b = tf.constant(10.0, name: "b");

            Tensor sum = tf.add(a, b, name: "sum");
            Tensor div = tf.div(a, b, name: "div");

            dynamic config = config_pb2.ConfigProto();

            // unless this is set, tensorflow-gpu consumes all of the GPU memory;
            // on the other hand, setting it can make training crash with a random OOM in the middle of a run
            config.gpu_options.allow_growth = true;
            Session sess = Session.NewDyn(config: config);

            sess.UseSelf(session => {
                var writer = new FileWriter(".", session.graph);
                Console.WriteLine($"a = {session.run(a)}");
                Console.WriteLine($"b = {session.run(b)}");
                Console.WriteLine($"a + b = {session.run(sum)}");
                Console.WriteLine($"a / b = {session.run(div)}");
                writer.close();
                session.close();
            });
        }
Code example #2
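Minimal entry point, apparently from a ResNet sample (per Console.Title): it routes Gradient's log output to the console, picks the Python environment from an environment variable, and hands off to Run().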
 static void Main()
 {
     Console.Title            = nameof(ResNetSampleProgram);
     GradientLog.OutputWriter = Console.Out;
     GradientSetup.UseEnvironmentFromVariable();
     Run();
 }
Code example #3
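Entry point of the LinearSvm sample program: Gradient is initialized explicitly so numpy can be imported through PythonEngine before the console commands are dispatched.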
        static int Main(string[] args)
        {
            GradientLog.OutputWriter = Console.Out;
            GradientSetup.UseEnvironmentFromVariable();

            // required before using PythonEngine
            GradientSetup.EnsureInitialized();
            np = PythonEngine.ImportModule("numpy");
            return(ConsoleCommandDispatcher.DispatchCommand(
                       ConsoleCommandDispatcher.FindCommandsInSameAssemblyAs(typeof(LinearSvmProgram)),
                       args, Console.Out));
        }
Code example #4
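Entry point of a char-rnn port (see the linked repository): the command-line arguments are parsed into either training or sampling parameters and routed to the corresponding handler.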
        static int Main(string[] args)
        {
            GradientLog.OutputWriter = Console.Out;
            GradientSetup.UseEnvironmentFromVariable();

            // ported from https://github.com/sherjilozair/char-rnn-tensorflow
            return(Parser.Default.ParseArguments <CharRNNTrainingParameters, CharRNNSamplingParameters>(args)
                   .MapResult(
                       (CharRNNTrainingParameters train) => Train(train),
                       (CharRNNSamplingParameters sample) => Sample(sample),
                       _ => 1));
        }
Code example #5
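GPT-2 sample entry point: it opts into usage data collection, forces Gradient initialization with a tf.no_op() call, and then dispatches console commands.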
        static int Main(string[] args)
        {
            Console.Title = "GPT-2";
            GradientSetup.OptInToUsageDataCollection();
            GradientSetup.UseEnvironmentFromVariable();
            // force Gradient initialization
            tensorflow.tf.no_op();

            return(ConsoleCommandDispatcher.DispatchCommand(
                       ConsoleCommandDispatcher.FindCommandsInSameAssemblyAs(typeof(Gpt2Program)),
                       args, Console.Out));
        }
Code example #6
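Entry point of the CSharpOrNot sample: a session with gpu_options.allow_growth enabled is installed as the Keras backend session before the console commands are dispatched.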
        public static int Main(string[] args)
        {
            GradientSetup.OptInToUsageDataCollection();
            GradientSetup.UseEnvironmentFromVariable();

            dynamic config = config_pb2.ConfigProto();

            config.gpu_options.allow_growth = true;
            tf.keras.backend.set_session(Session.NewDyn(config: config));

            return(ConsoleCommandDispatcher.DispatchCommand(
                       ConsoleCommandDispatcher.FindCommandsInSameAssemblyAs(typeof(CSharpOrNotProgram)),
                       args, Console.Out));
        }
Code example #7
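Keras Fashion-MNIST example: the dataset is downloaded (requires an Internet connection), the training-set size is sanity-checked, and a Sequential model starting with a Flatten layer is constructed; the listing is truncated partway through the model definition.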
        static void Main()
        {
            GradientLog.OutputWriter = Console.Out;
            GradientSetup.UseEnvironmentFromVariable();

            // requires Internet connection
            (dynamic train, dynamic test) = tf.keras.datasets.fashion_mnist.load_data();
            // will be able to do (trainImages, trainLabels) = train;
            ndarray trainImages = train.Item1;
            ndarray trainLabels = train.Item2;
            ndarray testImages  = test.Item1;
            ndarray testLabels  = test.Item2;

            bool loaded = 60000 == trainImages.Length;

            Debug.Assert(loaded);

            var model = new Sequential(new Layer[] {
                // will be able to do: new Flatten(kwargs: new { input_shape = (28, 28) }),
                new Flatten(kwargs: new PythonDict <string, object> {
                    ["input_shape"] = (28, 28),  // shape assumed from the comment above
                }),
                // ... remaining layers, model compilation, and training are truncated in this listing
            });
        }
Code example #8
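A small fully connected regression model built with tf.layers.dense: one sigmoid hidden layer and a linear output layer are trained with GradientDescentOptimizer on mean squared error, and the learned kernels and biases are then read back from their variable scopes.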
        static void Main()
        {
            GradientLog.OutputWriter = Console.Out;
            GradientSetup.UseEnvironmentFromVariable();

            var input  = tf.placeholder(tf.float32, new TensorShape(null, 1), name: "x");
            var output = tf.placeholder(tf.float32, new TensorShape(null, 1), name: "y");

            var hiddenLayer = tf.layers.dense(input, hiddenSize,
                                              activation: tf.sigmoid_fn,
                                              kernel_initializer: new ones_initializer(),
                                              bias_initializer: new random_uniform_initializer(minval: -x1, maxval: -x0),
                                              name: "hidden");

            var model = tf.layers.dense(hiddenLayer, units: 1, name: "output");

            var cost = tf.losses.mean_squared_error(output, model);

            var training = new GradientDescentOptimizer(learning_rate: learningRate).minimize(cost);

            dynamic init = tf.global_variables_initializer();

            new Session().UseSelf(session => {
                session.run(new[] { init });

                foreach (int iteration in Enumerable.Range(0, iterations))
                {
                    var(trainInputs, trainOutputs) = GenerateTestValues();
                    var iterationDataset           = new PythonDict <dynamic, object> {
                        [input]  = trainInputs,
                        [output] = trainOutputs,
                    };
                    session.run(new[] { training }, feed_dict: iterationDataset);

                    if (iteration % 100 == 99)
                    {
                        Console.WriteLine($"cost = {session.run(new[] { cost }, feed_dict: iterationDataset)}");
                    }
                }

                var(testInputs, testOutputs) = GenerateTestValues();

                var testValues = session.run(new[] { model }, feed_dict: new PythonDict <dynamic, object> {
                    [input] = testInputs,
                });

                new variable_scope("hidden", reuse: true).UseSelf(_ => {
                    Variable w = tf.get_variable("kernel");
                    Variable b = tf.get_variable("bias");
                    Console.WriteLine("hidden:");
                    Console.WriteLine($"kernel= {w.eval()}");
                    Console.WriteLine($"bias  = {b.eval()}");
                });

                new variable_scope("output", reuse: true).UseSelf(_ => {
                    Variable w = tf.get_variable("kernel");
                    Variable b = tf.get_variable("bias");
                    Console.WriteLine("hidden:");
                    Console.WriteLine($"kernel= {w.eval()}");
                    Console.WriteLine($"bias  = {b.eval()}");
                });
            });
        }
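The example above calls a GenerateTestValues helper (and uses hiddenSize, learningRate, iterations, x0, and x1) defined elsewhere in the sample and not shown in this listing. Below is a minimal, hypothetical sketch of what such a helper could look like, assuming the sample fits a simple one-dimensional function on the interval [x0, x1]; the actual target function and return types in the original may differ.

        // hypothetical helper: draws random inputs from [x0, x1] and computes a target value for each;
        // the real sample's target function and array types may differ
        static (float[,] inputs, float[,] outputs) GenerateTestValues(int count = 100)
        {
            var random  = new Random();
            var inputs  = new float[count, 1];
            var outputs = new float[count, 1];
            for (int i = 0; i < count; i++)
            {
                float x = (float)(x0 + random.NextDouble() * (x1 - x0));
                inputs[i, 0]  = x;
                outputs[i, 0] = (float)Math.Sin(x); // placeholder target function
            }
            return (inputs, outputs);
        }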