Beispiel #1
0
 public void PrepareData()
 {
     // Load the MNIST data set with one-hot encoded labels.
     mnist = MnistDataSet.read_data_sets("mnist", one_hot: true);

     // Deliberately keep the data small for this example.
     const int trainCount = 5000; // nearest-neighbour candidates
     const int testCount  = 200;  // samples to classify

     (Xtr, Ytr) = mnist.train.next_batch(trainCount);
     (Xte, Yte) = mnist.test.next_batch(testCount);
 }
Beispiel #2
0
 public void PrepareData()
 {
     // Load MNIST with one-hot encoded labels, honouring the configured
     // train/validation/test split sizes.
     mnist = MnistDataSet.read_data_sets("mnist", one_hot: true, train_size: TrainSize, validation_size: ValidationSize, test_size: TestSize);

     // In this example, we limit mnist data: take 1% of each configured
     // size, falling back to fixed counts when no size was configured.
     // Lifted nullable division + ?? replaces the explicit null ternaries.
     (Xtr, Ytr) = mnist.train.next_batch(TrainSize / 100 ?? 5000); // training (nn candidates)
     (Xte, Yte) = mnist.test.next_batch(TestSize / 100 ?? 200);    // testing
 }
Beispiel #3
0
        /// <summary>
        /// Loads a pre-trained LeNet model and prints a prediction for each of
        /// the first images of the local MNIST training set (one per second).
        /// </summary>
        public static void RunPredictTest()
        {
            LenetModel model = new LenetModel();

            model.Load(@"lenet.params");
            MnistDataSet ds        = new MnistDataSet(@"C:\素材\data\train-images.idx3-ubyte", @"C:\素材\data\train-labels.idx1-ubyte");
            int          W         = 28; // image width
            int          H         = 28; // image height
            List <float> listData  = ds.Data;
            List <float> listLabel = ds.Label;
            int          dataCount = ds.Count;

            using (FloatListHolder hData = listData.GetHolder())
                using (FloatListHolder hLabel = listLabel.GetHolder())
                {
                    // Both arrays are created directly in device (GPU) memory;
                    // the previous comment claiming "main memory" was wrong.
                    NDArray data_array = new NDArray(new Shape((uint)dataCount, 1, (uint)W, (uint)H), Context.Gpu(),
                                                     false);
                    NDArray label_array = new NDArray(new Shape((uint)dataCount), Context.Gpu(),
                                                      false);

                    // Blocking copies from the pinned CPU lists into the device
                    // arrays; WaitToRead ensures the transfers have completed.
                    data_array.SyncCopyFromCPU(hData.Handle, (ulong)(dataCount * W * H));
                    label_array.SyncCopyFromCPU(hLabel.Handle, (ulong)dataCount);
                    data_array.WaitToRead();
                    label_array.WaitToRead();

                    // Never read past the end of the data set, even when it
                    // contains fewer than 100 samples.
                    int predictCount = System.Math.Min(100, dataCount);
                    for (int i = 0; i < predictCount; i++)
                    {
                        NDArray data   = data_array.Slice((uint)i, (uint)i + 1);
                        String  output = model.Predict(data);
                        MnistDataSet.PrintImage(output, data);
                        System.Threading.Thread.Sleep(1000);
                    }
                }
        }
Beispiel #4
0
        public void PrepareData()
        {
            // Pull MNIST (one-hot labels) with the configured split sizes and
            // keep the full training image matrix for clustering.
            mnist       = MnistDataSet.read_data_sets("mnist", one_hot: true, train_size: train_size, validation_size: validation_size, test_size: test_size);
            full_data_x = mnist.train.images;

            // Fetch the serialized k-means graph definition this example uses.
            const string metaUrl = "https://raw.githubusercontent.com/SciSharp/TensorFlow.NET/master/graph/kmeans.meta";

            Web.Download(metaUrl, "graph", "kmeans.meta");
        }
 public void PrepareData()
 {
     // Load MNIST with one-hot encoded labels, limited to the configured
     // train/validation/test sizes.
     mnist = MnistDataSet.read_data_sets(
         "mnist",
         one_hot: true,
         train_size: train_size,
         validation_size: validation_size,
         test_size: test_size);
 }
 public void PrepareData()
 {
     // Load MNIST with one-hot encoded labels and keep a direct reference
     // to the full set of training images for later use.
     mnist = MnistDataSet.read_data_sets("mnist", one_hot: true);
     full_data_x = mnist.train.images;
 }
 public void PrepareData()
 {
     // Load the MNIST data set; labels are one-hot encoded.
     mnist = MnistDataSet.read_data_sets(
         "mnist",
         one_hot: true);
 }
Beispiel #8
0
        /// <summary>
        /// Trains (when no saved weights exist) and then runs the MNIST digit
        /// recognizer: loads weights if present, trains otherwise, persists
        /// freshly trained weights, and finally predicts over the data set.
        /// </summary>
        async Task TrainNetworkAsync()
        {
            RecognizerNetwork network = null;
            try {
                var  weightsName = "mnist5.weights";
                var  weightsPath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments), weightsName);
                var  hasWeights  = File.Exists(weightsPath);

                // Train only when there are no previously saved weights.
                // (A leftover debug override that forced training on every
                // run has been removed.)
                bool needsTrain  = !hasWeights;

                //
                // Create the network
                //
                network = new RecognizerNetwork();
                network.ImagesShown += Network_ImagesShown;
                Console.WriteLine(network);

                //
                // Read previously trained weights
                //
                if (hasWeights)
                {
                    await network.ReadAsync(weightsPath);
                }

                //
                // Load the data set (fixed seed keeps runs reproducible)
                //
                var dataSet = new MnistDataSet(seed: 42);

                //
                // Train the network
                //
                if (needsTrain)
                {
                    await network.TrainAsync(dataSet);
                }

                //
                // Save the network if training went well, then predict
                //
                if (network.WeightsAreValid())
                {
                    if (needsTrain)
                    {
                        await network.WriteAsync(weightsPath);
                    }

                    //
                    // Start predicting
                    //
                    await network.PredictAsync(dataSet);
                }
                else
                {
                    Console.WriteLine("Bad weights");
                }
            }
            catch (Exception ex) {
                Console.WriteLine(ex);
            }
            finally {
                //
                // Detach the handler even when an exception aborted the run,
                // so this instance does not keep the network alive.
                //
                if (network != null)
                {
                    network.ImagesShown -= Network_ImagesShown;
                }
            }
        }
 public void PrepareData()
 {
     // Load the MNIST data set into the "logistic_regression" folder;
     // labels are one-hot encoded.
     mnist = MnistDataSet.read_data_sets(
         "logistic_regression",
         one_hot: true);
 }
Beispiel #10
0
        /// <summary>
        /// Trains the LeNet symbol on the local MNIST files with SGD,
        /// reporting validation accuracy per epoch, and saves the learned
        /// parameters to "lenet.params".
        /// </summary>
        public void Run()
        {
            Symbol lenet = CreateLenet();

            //Symbol lenet = CreateFrom(@"C:\Works\Projects\80_Project_Python\mxnet\ocr\model\mnist-symbol.json");

            /*setup basic configs*/
            int   valFold       = 1; // tenths of the data held out for validation (1 => 10%)
            int   W             = 28; // image width
            int   H             = 28; // image height
            uint  batchSize     = 256;
            int   maxEpoch      = 20;
            float learning_rate = 0.05f;
            float weight_decay  = 0.0001f;

            MnistDataSet ds = new MnistDataSet(@"C:\素材\data\train-images.idx3-ubyte", @"C:\素材\data\train-labels.idx1-ubyte");
            //ds.Print();

            List <float> listData  = ds.Data;
            List <float> listLabel = ds.Label;
            int          dataCount = ds.Count;

            using (FloatListHolder hData = listData.GetHolder())
                using (FloatListHolder hLabel = listLabel.GetHolder())
                {
                    NDArray data_array = new NDArray(new Shape((uint)dataCount, 1, (uint)W, (uint)H), ctx_cpu,
                                                     false); // store in main memory, and copy to
                                                             // device memory while training

                    NDArray label_array = new NDArray(new Shape((uint)dataCount), ctx_cpu,
                                                      false); // it's also ok if just store them all in device memory

                    // Blocking copies of the CPU lists into the NDArrays.
                    data_array.SyncCopyFromCPU(hData.Handle, (ulong)(dataCount * W * H));
                    label_array.SyncCopyFromCPU(hLabel.Handle, (ulong)dataCount);
                    data_array.WaitToRead();
                    label_array.WaitToRead();

                    // Split: first (1 - valFold/10) of the samples train, the rest validate.
                    uint train_num = (uint)(dataCount * (1 - valFold / 10.0));
                    train_data  = data_array.Slice(0, train_num);
                    train_label = label_array.Slice(0, train_num);
                    val_data    = data_array.Slice(train_num, (uint)dataCount);
                    val_label   = label_array.Slice(train_num, (uint)dataCount);

                    Console.WriteLine("Data loaded ok!");

                    /*init some of the args*/
                    // Seed args_map with one device-resident batch so
                    // InferArgsMap can derive the remaining argument shapes.
                    args_map["data"]       = data_array.Slice(0, (uint)batchSize).Clone(ctx_dev);
                    args_map["data_label"] = label_array.Slice(0, (uint)batchSize).Clone(ctx_dev);
                    NDArray.WaitAll();

                    Console.WriteLine("Data sliced ok!");
                    lenet.InferArgsMap(ctx_dev, args_map, args_map, new XavierInitializer(2));
                    Optimizer opt = OptimizerRegistry.Find("sgd");
                    opt.SetParam("momentum", 0.9).SetParam("rescale_grad", 1.0 / batchSize);

                    for (int ITER = 0; ITER < maxEpoch; ++ITER)
                    {
                        Stopwatch sw = new Stopwatch();
                        sw.Start();
                        uint start_index = 0;
                        while (start_index < train_num)
                        {
                            // Clamp the final batch so it never reads past
                            // train_num (the tail batch may overlap samples
                            // already seen this epoch).
                            if (start_index + batchSize > train_num)
                            {
                                start_index = train_num - batchSize;
                            }
                            args_map["data"]       = train_data.Slice(start_index, start_index + batchSize).Clone(ctx_dev);
                            args_map["data_label"] = train_label.Slice(start_index, start_index + batchSize).Clone(ctx_dev);
                            start_index           += batchSize;
                            NDArray.WaitAll();

                            // NOTE(review): binding a fresh Executor for every
                            // batch looks costly; rebinding once per epoch may
                            // be possible — confirm against the MXNet binding's
                            // Executor semantics before changing.
                            Executor exe = lenet.SimpleBind(ctx_dev, args_map, new XavierInitializer(2));
                            exe.Forward(true);
                            exe.Backward();
                            exe.UpdateAll(opt, learning_rate, weight_decay);
                            exe.Dispose();
                        }
                        sw.Stop();

                        Console.WriteLine("Epoch[" + ITER + "] validation accuracy = " + ValAccuracy(batchSize, lenet) + ", time cost " + sw.Elapsed.TotalSeconds.ToString("0.00") + "s");
                    }
                }

            // Persist the trained parameters for later prediction runs.
            NDArray.Save("lenet.params", args_map);
        }