Example 1
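Loads previously trained LeNet parameters, copies the MNIST images to the GPU, and prints a prediction for each of the first 100 images, one per second.
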
        public static void RunPredictTest()
        {
            LenetModel model = new LenetModel();

            model.Load(@"lenet.params");
            MnistDataSet ds        = new MnistDataSet(@"C:\素材\data\train-images.idx3-ubyte", @"C:\素材\data\train-labels.idx1-ubyte");
            int          W         = 28;
            int          H         = 28;
            List<float>  listData  = ds.Data;
            List<float>  listLabel = ds.Label;
            int          dataCount = ds.Count;

            using (FloatListHolder hData = listData.GetHolder())
                using (FloatListHolder hLabel = listLabel.GetHolder())
                {
                    NDArray data_array = new NDArray(new Shape((uint)dataCount, 1, (uint)W, (uint)H), Context.Gpu(),
                                                     false);  // allocated directly in device memory;
                                                              // filled from host memory just below
                    NDArray label_array = new NDArray(new Shape((uint)dataCount), Context.Gpu(),
                                                      false); // the labels live on the device as well

                    data_array.SyncCopyFromCPU(hData.Handle, (ulong)(dataCount * W * H));
                    label_array.SyncCopyFromCPU(hLabel.Handle, (ulong)dataCount);
                    data_array.WaitToRead();
                    label_array.WaitToRead();

                    for (int i = 0; i < 100; i++)
                    {
                        NDArray data   = data_array.Slice((uint)i, (uint)i + 1);
                        String  output = model.Predict(data);
                        MnistDataSet.PrintImage(output, data);
                        System.Threading.Thread.Sleep(1000);
                    }
                }
        }
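
The MnistDataSet.PrintImage helper used above is not shown. As a rough illustration only, a console renderer for one 28x28 image could look like the sketch below; the method name, the float[] signature, and the 0..1 pixel range are assumptions, not the binding's actual API.

        // Hypothetical sketch: dump a 28x28 MNIST image as ASCII art next to
        // the predicted label. Pixels are assumed to be normalized to 0..1.
        public static void PrintImageSketch(string predictedLabel, float[] pixels, int w = 28, int h = 28)
        {
            Console.WriteLine("Prediction: " + predictedLabel);
            for (int y = 0; y < h; y++)
            {
                for (int x = 0; x < w; x++)
                {
                    float v = pixels[y * w + x];
                    // coarse four-step intensity ramp
                    Console.Write(v > 0.75f ? '#' : v > 0.5f ? '+' : v > 0.25f ? '.' : ' ');
                }
                Console.WriteLine();
            }
        }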
Example 2
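Builds the LeNet symbol, trains it on MNIST with mini-batch SGD (a 9:1 train/validation split), reports validation accuracy after every epoch, and saves the learned parameters to lenet.params.
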
        public void Run()
        {
            Symbol lenet = CreateLenet();

            //Symbol lenet = CreateFrom(@"C:\Works\Projects\80_Project_Python\mxnet\ocr\model\mnist-symbol.json");

            /* set up basic configs */
            int   valFold       = 1;     // hold out 1 fold in 10 for validation
            int   W             = 28;
            int   H             = 28;
            uint  batchSize     = 256;
            int   maxEpoch      = 20;
            float learning_rate = 0.05f;
            float weight_decay  = 0.0001f;

            MnistDataSet ds = new MnistDataSet(@"C:\素材\data\train-images.idx3-ubyte", @"C:\素材\data\train-labels.idx1-ubyte");
            //ds.Print();

            List<float>  listData  = ds.Data;
            List<float>  listLabel = ds.Label;
            int          dataCount = ds.Count;

            using (FloatListHolder hData = listData.GetHolder())
                using (FloatListHolder hLabel = listLabel.GetHolder())
                {
                    NDArray data_array = new NDArray(new Shape((uint)dataCount, 1, (uint)W, (uint)H), ctx_cpu,
                                                     false); // kept in main memory and copied to
                                                             // device memory batch by batch during training

                    NDArray label_array = new NDArray(new Shape((uint)dataCount), ctx_cpu,
                                                      false); // it would also be fine to keep them all in device memory

                    data_array.SyncCopyFromCPU(hData.Handle, (ulong)(dataCount * W * H));
                    label_array.SyncCopyFromCPU(hLabel.Handle, (ulong)dataCount);
                    data_array.WaitToRead();
                    label_array.WaitToRead();

                    // hold out the last valFold tenth(s) of the samples for validation
                    uint train_num = (uint)(dataCount * (1 - valFold / 10.0));
                    train_data  = data_array.Slice(0, train_num);
                    train_label = label_array.Slice(0, train_num);
                    val_data    = data_array.Slice(train_num, (uint)dataCount);
                    val_label   = label_array.Slice(train_num, (uint)dataCount);

                    Console.WriteLine("Data loaded ok!");

                    /* init some of the args */
                    args_map["data"]       = data_array.Slice(0, (uint)batchSize).Clone(ctx_dev);
                    args_map["data_label"] = label_array.Slice(0, (uint)batchSize).Clone(ctx_dev);
                    NDArray.WaitAll();

                    Console.WriteLine("Data sliced ok!");
                    lenet.InferArgsMap(ctx_dev, args_map, args_map, new XavierInitializer(2));
                    Optimizer opt = OptimizerRegistry.Find("sgd");
                    opt.SetParam("momentum", 0.9).SetParam("rescale_grad", 1.0 / batchSize);

                    for (int ITER = 0; ITER < maxEpoch; ++ITER)
                    {
                        Stopwatch sw = new Stopwatch();
                        sw.Start();
                        uint start_index = 0;
                        while (start_index < train_num)
                        {
                            // last batch: back up so a full batch still fits
                            if (start_index + batchSize > train_num)
                            {
                                start_index = train_num - batchSize;
                            }
                            args_map["data"]       = train_data.Slice(start_index, start_index + batchSize).Clone(ctx_dev);
                            args_map["data_label"] = train_label.Slice(start_index, start_index + batchSize).Clone(ctx_dev);
                            start_index           += batchSize;
                            NDArray.WaitAll();

                            // bind an executor for this batch and take one SGD step
                            Executor exe = lenet.SimpleBind(ctx_dev, args_map, new XavierInitializer(2));
                            exe.Forward(true);
                            exe.Backward();
                            exe.UpdateAll(opt, learning_rate, weight_decay);
                            exe.Dispose();
                        }
                        sw.Stop();

                        Console.WriteLine("Epoch[" + ITER + "] validation accuracy = " + ValAccuracy(batchSize, lenet) + ", time cost " + sw.Elapsed.TotalSeconds.ToString("0.00") + "s");
                    }
                }

            NDArray.Save("lenet.params", args_map);
        }
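
The ValAccuracy helper called in the epoch loop is not shown here. After a forward pass over each validation batch, its core is an argmax-vs-label count, sketched below in plain C#. The helper name and the flat layout of probs (numClasses scores per sample, copied back to the CPU after the forward pass) are assumptions for illustration.

        // Hypothetical sketch of the counting step inside a ValAccuracy-style
        // helper. probs: numClasses scores per sample, flattened row-major;
        // labels: ground-truth class indices stored as floats.
        public static float Accuracy(float[] probs, float[] labels, int numClasses)
        {
            int correct = 0;
            int total   = labels.Length;
            for (int i = 0; i < total; i++)
            {
                int best = 0;
                for (int k = 1; k < numClasses; k++)   // argmax over this sample's scores
                {
                    if (probs[i * numClasses + k] > probs[i * numClasses + best])
                        best = k;
                }
                if (best == (int)labels[i])
                    correct++;
            }
            return total == 0 ? 0f : correct / (float)total;
        }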