Example #1
0
 // Entry point for the ResNet sample: wires up logging and the Python
 // environment, then hands off to Run(). Statement order matters here —
 // logging and environment setup must precede Run().
 static void Main()
 {
     // Show the sample's name in the console window title.
     Console.Title            = nameof(ResNetSampleProgram);
     // Route Gradient (TensorFlow binding) log output to stdout.
     GradientLog.OutputWriter = Console.Out;
     // Presumably selects the Python/TF environment from an environment
     // variable — TODO confirm against GradientSetup docs.
     GradientSetup.UseEnvironmentFromVariable();
     Run();
 }
        // Minimal TensorFlow smoke test: builds a graph of two scalar
        // constants plus their sum and quotient, evaluates each in a
        // session, and dumps the graph for TensorBoard.
        static void Main()
        {
            GradientLog.OutputWriter = Console.Out;
            GradientSetup.UseEnvironmentFromVariable();

            Tensor a = tf.constant(5.0, name: "a");
            Tensor b = tf.constant(10.0, name: "b");
            Tensor sum = tf.add(a, b, name: "sum");
            Tensor div = tf.div(a, b, name: "div");

            dynamic config = config_pb2.ConfigProto();
            // unless this is set, tensorflow-gpu consumes all of GPU memory
            // don't set it if you don't want you training to crash due to random OOM in the middle
            config.gpu_options.allow_growth = true;

            Session.NewDyn(config: config).UseSelf(session => {
                var graphWriter = new FileWriter(".", session.graph);

                // Evaluate and print each tensor with its display label.
                foreach (var (label, tensor) in new[] {
                    ("a", a), ("b", b), ("a + b", sum), ("a / b", div),
                })
                {
                    Console.WriteLine($"{label} = {session.run(tensor)}");
                }

                graphWriter.close();
                session.close();
            });
        }
        /// <summary>
        /// Generates one GPT-2 text sample and writes it to stdout,
        /// trimmed to the first end-of-text marker.
        /// </summary>
        /// <param name="remainingArguments">Unused positional arguments.</param>
        /// <returns>0 on success; -2 when no end-of-text marker was produced
        /// within the sample length.</returns>
        public override int Run(string[] remainingArguments)
        {
            Console.OutputEncoding    = Encoding.UTF8;
            // Diagnostics go to stderr so stdout carries only the sample text.
            GradientLog.WarningWriter = GradientLog.OutputWriter = Console.Error;
            if (!string.IsNullOrEmpty(this.CondaEnv))
            {
                GradientSetup.UsePythonEnvironment(PythonEnvironment.EnumerateCondaEnvironments()
                                                   .Single(env => Path.GetFileName(env.Home) == this.CondaEnv));
            }

            var generator = new Gpt2TextGenerator(
                modelName: this.ModelName,
                checkpoint: this.Checkpoint,
                sampleLength: this.MaxLength);
            uint   seed = this.Seed ?? GetRandomSeed();
            string text = generator.GenerateSample(seed);

            // Strip leading end-of-text markers.
            // FIX: StartsWith without a StringComparison is culture-sensitive
            // (CA1310); use Ordinal to match the Ordinal IndexOf below.
            while (text.StartsWith(generator.EndOfText, StringComparison.Ordinal))
            {
                text = text.Substring(generator.EndOfText.Length);
            }
            int end = text.IndexOf(generator.EndOfText, StringComparison.Ordinal);

            if (end < 0)
            {
                // The sample never terminated; emit it whole but signal failure.
                Console.Error.WriteLine("Text generated from this seed is longer than max-length.");
                Console.WriteLine(text);
                return -2;
            }

            Console.Write(text.Substring(0, end));

            return 0;
        }
Example #4
0
        // Window constructor: resolves named controls from XAML, prepares
        // greyscale render buffers, and builds + loads the classifier model.
        // Statement order matters: controls must exist before event wiring,
        // and Gradient must be initialized before the model is created.
        public CSharpOrNotWindow()
        {
            this.InitializeComponent();
#if DEBUG
            this.AttachDevTools();
#endif
            // Re-render/classify whenever the code display changes.
            this.codeDisplay = this.Get <TextBox>("CodeDisplay");
            this.codeDisplay.PropertyChanged += this.CodeDisplayOnPropertyChanged;

            // Look up the remaining named controls declared in the view.
            this.codeWindow     = this.Get <TextBlock>("CodeWindow");
            this.language       = this.Get <TextBlock>("Language");
            this.languageBox    = this.Get <ContentControl>("LanguageBox");
            this.codeImage      = this.Get <Image>("CodeImage");
            this.openFileButton = this.Get <Button>("OpenFileButton");

            // The model consumes single-channel images; both bitmaps use a
            // greyscale palette.
            BitmapTools.SetGreyscalePalette(this.renderTarget);
            BitmapTools.SetGreyscalePalette(this.output);

            GradientSetup.EnsureInitialized();

            // One output class per recognized file extension; input is a
            // Height x Width single-channel image with dynamic batch size.
            this.model = CreateModel(classCount: IncludeExtensions.Length);
            this.model.build(new TensorShape(null, CSharpOrNot.Height, CSharpOrNot.Width, 1));

            this.LoadWeights();
        }
        // Entry point: prepare the Gradient runtime, then run the CNN
        // digit-recognition sample end to end.
        static void Main()
        {
            GradientEngine.UseEnvironmentFromVariable();
            GradientSetup.EnsureInitialized();

            new DigitRecognitionCNN().Run();
        }
        // Entry point: discovers console commands in this assembly and
        // dispatches to the one named on the command line.
        static int Main(string[] args)
        {
            GradientSetup.OptInToUsageDataCollection();
            // force Gradient initialization
            tensorflow.tf.no_op();

            var commands = ConsoleCommandDispatcher.FindCommandsInSameAssemblyAs(typeof(Gpt2Program));
            return ConsoleCommandDispatcher.DispatchCommand(commands, args, Console.Out);
        }
Example #7
0
        // Entry point: initializes Gradient and numpy interop, then routes
        // the command line to a command defined in this assembly.
        static int Main(string[] args)
        {
            GradientLog.OutputWriter = Console.Out;
            GradientSetup.UseEnvironmentFromVariable();

            // required before using PythonEngine
            GradientSetup.EnsureInitialized();
            np = PythonEngine.ImportModule("numpy");

            var commands = ConsoleCommandDispatcher.FindCommandsInSameAssemblyAs(typeof(LinearSvmProgram));
            return ConsoleCommandDispatcher.DispatchCommand(commands, args, Console.Out);
        }
        // Entry point: parses the command line into either training or
        // sampling parameters and runs the matching mode.
        static int Main(string[] args)
        {
            GradientLog.OutputWriter = Console.Out;
            GradientSetup.UseEnvironmentFromVariable();

            // ported from https://github.com/sherjilozair/char-rnn-tensorflow
            var parsed = Parser.Default.ParseArguments <CharRNNTrainingParameters, CharRNNSamplingParameters>(args);
            return parsed.MapResult(
                (CharRNNTrainingParameters trainArgs) => Train(trainArgs),
                (CharRNNSamplingParameters sampleArgs) => Sample(sampleArgs),
                errors => 1);  // parse failure -> non-zero exit code
        }
Example #9
0
        // Entry point: configures the shared Keras session (with incremental
        // GPU memory growth), then dispatches to a console command.
        public static int Main(string[] args)
        {
            GradientSetup.OptInToUsageDataCollection();
            GradientSetup.UseEnvironmentFromVariable();

            dynamic sessionConfig = config_pb2.ConfigProto();
            // Grow GPU memory on demand instead of grabbing it all upfront.
            sessionConfig.gpu_options.allow_growth = true;
            tf.keras.backend.set_session(Session.NewDyn(config: sessionConfig));

            var commands = ConsoleCommandDispatcher.FindCommandsInSameAssemblyAs(typeof(CSharpOrNotProgram));
            return ConsoleCommandDispatcher.DispatchCommand(commands, args, Console.Out);
        }
Example #10
0
        // Fashion-MNIST classification sample.
        // NOTE(review): this snippet is truncated by the source extraction —
        // the Sequential layer list (and the rest of the method) is cut off
        // mid-expression below.
        static void Main()
        {
            GradientLog.OutputWriter = Console.Out;
            GradientSetup.UseEnvironmentFromVariable();

            // requires Internet connection
            (dynamic train, dynamic test) = tf.keras.datasets.fashion_mnist.load_data();
            // will be able to do (trainImages, trainLabels) = train;
            ndarray trainImages = train.Item1;
            ndarray trainLabels = train.Item2;
            ndarray testImages  = test.Item1;
            ndarray testLabels  = test.Item2;

            // Sanity check: the Fashion-MNIST training set has 60000 images.
            bool loaded = 60000 == trainImages.Length;

            Debug.Assert(loaded);

            var model = new Sequential(new Layer[] {
                // will be able to do: new Flatten(kwargs: new { input_shape = (28, 28) }),
                new Flatten(kwargs: new PythonDict <string, object> {
Example #11
0
        /// <summary>
        /// Creates the GPT-2 based lyrics generator, resolving the model root
        /// and checkpoint from configuration and downloading either on first
        /// use when "Model:Download" is enabled.
        /// </summary>
        /// <returns>A configured <see cref="Gpt2LyricsGenerator"/>.</returns>
        /// <exception cref="ArgumentNullException">"Model:Type" or "Model:Run" is missing.</exception>
        /// <exception cref="FileNotFoundException">The model or checkpoint is absent and cannot be downloaded.</exception>
        private ILyricsGenerator CreateGradientLyrics()
        {
            string condaEnvName = this.Configuration.GetValue <string>("PYTHON_CONDA_ENV_NAME", null);

            if (!string.IsNullOrEmpty(condaEnvName))
            {
                GradientSetup.UseCondaEnvironment(condaEnvName);
            }

            var    logger         = this.LoggerFactory.CreateLogger <Startup>();
            bool   download       = this.Configuration.GetValue("Model:Download", defaultValue: true);
            string gpt2Root       = this.Configuration.GetValue("GPT2_ROOT", Environment.CurrentDirectory);
            string checkpointName = this.Configuration.GetValue("Model:Checkpoint", "latest");
            string modelName      = this.Configuration.GetValue <string>("Model:Type", null)
                                    ?? throw new ArgumentNullException("Model:Type");

            string modelRoot = Path.Combine(gpt2Root, "models", modelName);

            // FIX: use structured log templates instead of string interpolation
            // (CA2254) so the values survive as structured log properties.
            logger.LogInformation("Using model from {ModelRoot}", modelRoot);
            // encoder.json marks a complete set of model parameters on disk.
            if (!File.Exists(Path.Combine(modelRoot, "encoder.json")))
            {
                if (download)
                {
                    logger.LogInformation("downloading {ModelName} parameters", modelName);
                    ModelDownloader.DownloadModelParameters(gpt2Root, modelName);
                    logger.LogInformation("downloaded {ModelName} parameters", modelName);
                }
                else
                {
                    // FIX: dropped a stray '$' — the literal had no interpolation
                    // holes and was concatenated with '+' anyway.
                    throw new FileNotFoundException("Can't find GPT-2 model in " + modelRoot);
                }
            }

            string runName = this.Configuration.GetValue <string>("Model:Run", null)
                             ?? throw new ArgumentNullException("Model:Run");

            string checkpoint = Gpt2Checkpoints.ProcessCheckpointConfig(gpt2Root, checkpointName, modelName: modelName, runName: runName);

            logger.LogInformation("Using model checkpoint: {Checkpoint}", checkpoint);
            // A usable checkpoint must have its .index file on disk.
            if (checkpoint == null || !File.Exists(checkpoint + ".index"))
            {
                // Only the 'latest' checkpoint can be fetched automatically.
                if (download && checkpointName == "latest")
                {
                    logger.LogInformation("downloading the latest checkpoint for {ModelName}, run {RunName}", modelName, runName);
                    checkpoint = ModelDownloader.DownloadCheckpoint(
                        root: gpt2Root,
                        modelName: modelName,
                        runName: runName);
                    logger.LogInformation("download successful");
                }
                else
                {
                    if (!download)
                    {
                        logger.LogWarning("Model downloading is disabled. See corresponding appsettings file.");
                    }
                    else if (checkpointName != "latest")
                    {
                        logger.LogWarning("Only the 'latest' model can be downloaded. You wanted: " + checkpointName);
                    }
                    throw new FileNotFoundException("Can't find checkpoint " + checkpoint + ".index");
                }
            }

            return new Gpt2LyricsGenerator(
                       gpt2Root: gpt2Root, modelName: modelName, checkpoint: checkpoint,
                       logger: this.LoggerFactory.CreateLogger <Gpt2LyricsGenerator>(),
                       condaEnv: condaEnvName);
        }
        // Trains a 1-hidden-layer regression network (x -> y) with plain
        // gradient descent, logging the cost every 100 iterations, then
        // prints the learned weights of both dense layers.
        static void Main()
        {
            GradientLog.OutputWriter = Console.Out;
            GradientSetup.UseEnvironmentFromVariable();

            // Placeholders: batches of single-feature inputs and targets.
            var input  = tf.placeholder(tf.float32, new TensorShape(null, 1), name: "x");
            var output = tf.placeholder(tf.float32, new TensorShape(null, 1), name: "y");

            var hiddenLayer = tf.layers.dense(input, hiddenSize,
                                              activation: tf.sigmoid_fn,
                                              kernel_initializer: new ones_initializer(),
                                              bias_initializer: new random_uniform_initializer(minval: -x1, maxval: -x0),
                                              name: "hidden");

            var model = tf.layers.dense(hiddenLayer, units: 1, name: "output");

            var cost = tf.losses.mean_squared_error(output, model);

            var training = new GradientDescentOptimizer(learning_rate: learningRate).minimize(cost);

            dynamic init = tf.global_variables_initializer();

            new Session().UseSelf(session => {
                session.run(new[] { init });

                foreach (int iteration in Enumerable.Range(0, iterations))
                {
                    var(trainInputs, trainOutputs) = GenerateTestValues();
                    var iterationDataset           = new PythonDict <dynamic, object> {
                        [input]  = trainInputs,
                        [output] = trainOutputs,
                    };
                    session.run(new[] { training }, feed_dict: iterationDataset);

                    // Periodic progress report.
                    if (iteration % 100 == 99)
                    {
                        Console.WriteLine($"cost = {session.run(new[] { cost }, feed_dict: iterationDataset)}");
                    }
                }

                var(testInputs, testOutputs) = GenerateTestValues();

                var testValues = session.run(new[] { model }, feed_dict: new PythonDict <dynamic, object> {
                    [input] = testInputs,
                });

                // Prints the kernel and bias of a dense layer by reopening
                // its variable scope with reuse enabled.
                void PrintLayerWeights(string scopeName) =>
                    new variable_scope(scopeName, reuse: true).UseSelf(_ => {
                        Variable w = tf.get_variable("kernel");
                        Variable b = tf.get_variable("bias");
                        // BUG FIX: the second copy of this block printed the
                        // label "hidden:" for the output layer (copy-paste);
                        // print the actual scope name instead.
                        Console.WriteLine($"{scopeName}:");
                        Console.WriteLine($"kernel= {w.eval()}");
                        Console.WriteLine($"bias  = {b.eval()}");
                    });

                PrintLayerWeights("hidden");
                PrintLayerWeights("output");
            });
        }