示例#1
0
        private static void Main()
        {
            // Build and run the LeNet example, then shut the MXNet engine down cleanly.
            var network = new Lenet();
            network.Run();

            MXNet.MXNotifyShutdown();
        }
示例#2
0
        /// <summary>
        /// Loads a trained MXNet model ("{prefix}-symbol.json" + "{prefix}-{epoch:D4}.params")
        /// and creates the native predictor handle used for inference.
        /// </summary>
        /// <param name="modelPrefix">Path prefix of the model files.</param>
        /// <param name="epoch">Epoch number of the parameter snapshot to load (0-9999).</param>
        /// <param name="width">Network input width in pixels (must be >= 1).</param>
        /// <param name="height">Network input height in pixels (must be >= 1).</param>
        /// <param name="meanR">Mean value subtracted from the first channel.</param>
        /// <param name="meanG">Mean value subtracted from the second channel.</param>
        /// <param name="meanB">Mean value subtracted from the third channel.</param>
        /// <param name="deviceType">MXNet device type passed to MXPredCreate.</param>
        /// <param name="deviceId">Device ordinal passed to MXPredCreate.</param>
        /// <exception cref="ArgumentOutOfRangeException">Epoch or input size is out of range.</exception>
        /// <exception cref="FileNotFoundException">A model file is missing.</exception>
        /// <exception cref="ArgumentException">The parameter file is empty.</exception>
        public Detector(string modelPrefix, int epoch, int width, int height, float meanR, float meanG, float meanB, int deviceType, int deviceId)
        {
            // BUGFIX: ArgumentOutOfRangeException(string) interprets its single argument as
            // the *parameter name*, not the message — pass nameof(...) plus the message.
            if (epoch < 0 || epoch > 9999)
            {
                throw new ArgumentOutOfRangeException(nameof(epoch), $"Invalid epoch number: {epoch}");
            }

            var modelFile = $"{modelPrefix}-{epoch:D4}.params";

            if (!File.Exists(modelFile))
            {
                throw new FileNotFoundException($"{modelFile} is not found");
            }

            var jsonFile = $"{modelPrefix}-symbol.json";

            if (!File.Exists(jsonFile))
            {
                throw new FileNotFoundException($"{jsonFile} is not found");
            }

            if (width < 1 || height < 1)
            {
                throw new ArgumentOutOfRangeException(nameof(width), $"Invalid width or height: {width}, {height}");
            }

            this._Width       = width;
            this._Height      = height;
            this.InputName    = "data";
            this.InputKeys    = new[] { this.InputName };

            // Single input of shape (1, 3, W, H); indptr marks the [start, end) slice of the
            // one shape inside inputShapeData.
            // NOTE(review): NCHW layout usually orders trailing axes as (height, width); here
            // width precedes height — harmless for square inputs, confirm otherwise.
            var inputShapeIndptr = new[] { 0u, 4u };
            var inputShapeData   = new[] { 1u, 3u, (uint)this._Width, (uint)this._Height };

            this._MeanR = meanR;
            this._MeanG = meanG;
            this._MeanB = meanB;

            // Load model: parameter blob plus symbol JSON, then create the native predictor.
            var paramFileBuffer = File.ReadAllBytes(modelFile);
            var json            = File.ReadAllText(jsonFile);

            // Guard clause instead of if/else: an empty parameter file cannot be loaded.
            if (paramFileBuffer.Length == 0)
            {
                throw new ArgumentException($"Unable to read model file: {modelFile}");
            }

            MXNet.MXPredCreate(json, paramFileBuffer, paramFileBuffer.Length, deviceType, deviceId, 1, this.InputKeys, inputShapeIndptr, inputShapeData, out this._Predictor);
        }
示例#3
0
        private static int Main(string[] args)
        {
            // Classify one image with a pre-trained Inception-BN model and print the result.
            if (args.Length < 1)
            {
                Console.WriteLine("No test image here.");
                Console.WriteLine("Usage: ./image-classification-predict apple.jpg");
                return 0;
            }

            var imagePath = args[0];

            // Models path for your model, you have to modify it
            var symbolPath = "model/Inception/Inception-BN-symbol.json";
            var paramsPath = "model/Inception/Inception-BN-0126.params";
            var synsetPath = "model/Inception/synset.txt";
            var meanNdPath = "model/Inception/mean_224.nd";

            // Bail out (exit code 0, matching the original) when any required file is missing.
            foreach (var required in new[] { symbolPath, paramsPath, synsetPath })
            {
                if (!File.Exists(required))
                {
                    Console.WriteLine($"{required} is not found.");
                    return 0;
                }
            }

            var symbolBuffer = new BufferFile(symbolPath);
            var paramsBuffer = new BufferFile(paramsPath);

            // Parameters
            const int  deviceType    = 1;  // 1: cpu, 2: gpu
            const int  deviceId      = 0;  // arbitrary.
            const uint numInputNodes = 1u; // 1 for feedforward

            var inputKeys = new[] { "data" };

            // Image size and channels
            const int width    = 224;
            const int height   = 224;
            const int channels = 3;

            // One input shape (1, C, H, W); indptr delimits it within shapeData.
            var shapeIndptr = new[] { 0u, 4u };
            var shapeData   = new[] { 1u, (uint)channels, (uint)height, (uint)width };

            if (symbolBuffer.GetLength() == 0 || paramsBuffer.GetLength() == 0)
            {
                return -1;
            }

            MXNet.MXPredCreate(File.ReadAllText(symbolPath),
                               paramsBuffer.GetBuffer(),
                               paramsBuffer.GetLength(),
                               deviceType,
                               deviceId,
                               numInputNodes,
                               inputKeys,
                               shapeIndptr,
                               shapeData,
                               out var predictor);

            var imageSize = (uint)(width * height * channels);

            // Read mean data (optional; used to normalize the input image).
            float[]      meanData   = null;
            NDListHandle meanList   = null;
            var          meanBuffer = new BufferFile(meanNdPath);

            if (meanBuffer.GetLength() > 0)
            {
                MXNet.MXNDListCreate(meanBuffer.GetBuffer(), meanBuffer.GetLength(), out meanList, out var listLength);

                MXNet.MXNDListGet(meanList, 0u, out var meanKey, out meanData, out var meanShape, out var meanNdim);
            }

            // Read image data into a float buffer (mean-subtracted when meanData is set).
            var pixels = new float[imageSize];

            GetImageFile(imagePath, pixels, channels, new Size(width, height), meanData);

            // Set Input Image
            MXNet.MXPredSetInput(predictor, "data", pixels, imageSize);

            // Do Predict Forward
            MXNet.MXPredForward(predictor);

            // Get Output Result
            MXNet.MXPredGetOutputShape(predictor, 0u, out var outShape, out var outDims);

            // Total number of elements in the first output tensor.
            var outSize = 1u;
            for (var d = 0u; d < outDims; ++d)
            {
                outSize *= outShape[d];
            }

            var scores = new float[outSize];

            MXNet.MXPredGetOutput(predictor, 0u, scores, outSize);

            // Release NDList
            meanList?.Dispose();

            // Release Predictor
            predictor?.Dispose();

            // Synset path for your model, you have to modify it
            var synset = LoadSynset(synsetPath);

            // Print Output Data
            PrintOutputResult(scores, synset);

            return 0;
        }
示例#4
0
        private static void Main()
        {
            // Train a small MLP on MNIST with plain SGD and log per-epoch speed and accuracy.
            const int   imageSize    = 28;
            const int   batchSize    = 100;
            const int   maxEpoch     = 10;
            const float learningRate = 0.1f;
            const float weightDecay  = 1e-2f;

            var layers = new[] { 128, 64, 10 };

            var trainIter = new MXDataIter("MNISTIter")
                            .SetParam("image", "./mnist_data/train-images-idx3-ubyte")
                            .SetParam("label", "./mnist_data/train-labels-idx1-ubyte")
                            .SetParam("batch_size", batchSize)
                            .SetParam("flat", 1)
                            .CreateDataIter();
            var valIter = new MXDataIter("MNISTIter")
                          .SetParam("image", "./mnist_data/t10k-images-idx3-ubyte")
                          .SetParam("label", "./mnist_data/t10k-labels-idx1-ubyte")
                          .SetParam("batch_size", batchSize)
                          .SetParam("flat", 1)
                          .CreateDataIter();

            var net = Mlp(layers);

            var ctx = Context.Cpu();  // Use CPU for training

            // Declare the input shapes; MXNet infers the remaining parameter shapes.
            var paramMap = new SortedDictionary<string, NDArray>
            {
                ["X"]     = new NDArray(new Shape(batchSize, imageSize * imageSize), ctx),
                ["label"] = new NDArray(new Shape(batchSize), ctx),
            };
            net.InferArgsMap(ctx, paramMap, paramMap);

            // Initialize all parameters with uniform distribution U(-0.01, 0.01).
            var uniform = new Uniform(0.01f);
            foreach (var pair in paramMap)
            {
                // pair.Key is the parameter name, pair.Value the backing array.
                uniform.Operator(pair.Key, pair.Value);
            }

            // Plain SGD optimizer, gradients rescaled by the batch size.
            var opt = OptimizerRegistry.Find("sgd");
            opt.SetParam("rescale_grad", 1.0 / batchSize)
               .SetParam("lr", learningRate)
               .SetParam("wd", weightDecay);

            // Bind the parameters to the model and train.
            using (var exec = net.SimpleBind(ctx, paramMap))
            {
                var argNames  = net.ListArguments();
                var stopwatch = new Stopwatch();

                for (var epoch = 0; epoch < maxEpoch; ++epoch)
                {
                    var sampleCount = 0;
                    trainIter.Reset();

                    stopwatch.Restart();

                    while (trainIter.Next())
                    {
                        sampleCount += batchSize;
                        var batch = trainIter.GetDataBatch();

                        // Feed data and label into the bound arrays.
                        batch.Data.CopyTo(paramMap["X"]);
                        batch.Label.CopyTo(paramMap["label"]);

                        // Forward + backward to compute gradients.
                        exec.Forward(true);
                        exec.Backward();

                        // Apply SGD to every learnable parameter; inputs are skipped.
                        for (var i = 0; i < argNames.Count; ++i)
                        {
                            var name = argNames[i];
                            if (name != "X" && name != "label")
                            {
                                opt.Update(i, exec.ArgmentArrays[i], exec.GradientArrays[i]);
                            }
                        }
                    }

                    stopwatch.Stop();

                    // Evaluate on the validation set; a forward pass alone is sufficient.
                    var accuracy = new Accuracy();
                    valIter.Reset();
                    while (valIter.Next())
                    {
                        var batch = valIter.GetDataBatch();
                        batch.Data.CopyTo(paramMap["X"]);
                        batch.Label.CopyTo(paramMap["label"]);
                        exec.Forward(false);
                        accuracy.Update(batch.Label, exec.Outputs[0]);
                    }

                    var seconds = stopwatch.ElapsedMilliseconds / 1000.0;
                    Logging.LG($"Epoch: {epoch} {sampleCount / seconds} samples/sec Accuracy: {accuracy.Get()}");
                }
            }

            MXNet.MXNotifyShutdown();
        }
示例#5
0
        private static void Main()
        {
            // Train AlexNet (2 classes) on ImageRecord data, logging train/val metrics and
            // checkpointing the learnable parameters after every epoch.

            /*basic config*/
            const int   batchSize    = 256;
            const int   maxEpo       = 100;
            const float learningRate = 1e-4f;
            const float weightDecay  = 1e-4f;

            /*context and net symbol*/
            var ctx = Context.Gpu();
            var net = AlexnetSymbol(2);

            /*args_map is used for declaring input shapes (aux map removed: it was unused)*/
            var argsMap = new Dictionary<string, NDArray>();

            /*we should tell mxnet the shape of data and label*/
            argsMap["data"]  = new NDArray(new Shape(batchSize, 3, 256, 256), ctx);
            argsMap["label"] = new NDArray(new Shape(batchSize), ctx);

            /*with data and label, executor can be generated automatically*/
            using (var exec = net.SimpleBind(ctx, argsMap))
            {
                var argNames          = net.ListArguments();
                var argmentDictionary = exec.ArgmentDictionary();

                /*if fine tune from some pre-trained model, we should load the parameters*/
                // NDArray.Load("./model/alex_params_3", nullptr, &args_map);
                /*else, we should use initializer Xavier to init the params*/
                var xavier = new Xavier(RandType.Gaussian, FactorType.In, 2.34f);
                foreach (var arg in argmentDictionary)
                {
                    /*be careful here, the arg's name must have specific endings or prefixes
                     * for the initializer to pick the right scheme*/
                    xavier.Operator(arg.Key, arg.Value);
                }

                /*print out each argument name and its shape to check the net*/
                foreach (var s in net.ListArguments())
                {
                    Logging.LG(s);

                    var sb = new StringBuilder();
                    var k  = argmentDictionary[s].GetShape();
                    foreach (var i in k)
                    {
                        sb.Append($"{i} ");
                    }

                    Logging.LG(sb.ToString());
                }

                /*these binary files should be generated using im2rec tools, which can be found
                 * in mxnet/bin*/
                var trainIter = new MXDataIter("ImageRecordIter")
                                .SetParam("path_imglist", "./data/train.lst")
                                .SetParam("path_imgrec", "./data/train.rec")
                                .SetParam("data_shape", new Shape(3, 256, 256))
                                .SetParam("batch_size", batchSize)
                                .SetParam("shuffle", 1)
                                .CreateDataIter();
                var valIter = new MXDataIter("ImageRecordIter")
                              .SetParam("path_imglist", "./data/val.lst")
                              .SetParam("path_imgrec", "./data/val.rec")
                              .SetParam("data_shape", new Shape(3, 256, 256))
                              .SetParam("batch_size", batchSize)
                              .CreateDataIter();

                var opt = OptimizerRegistry.Find("ccsgd");
                opt.SetParam("momentum", 0.9)
                .SetParam("rescale_grad", 1.0 / batchSize)
                .SetParam("clip_gradient", 10)
                .SetParam("lr", learningRate)
                .SetParam("wd", weightDecay);

                var accuracyTrain = new Accuracy();
                var accuracyVal   = new Accuracy();
                var loglossVal    = new LogLoss();
                for (var iter = 0; iter < maxEpo; ++iter)
                {
                    Logging.LG($"Train Epoch: {iter}");
                    /*reset the metric every epoch*/
                    accuracyTrain.Reset();
                    /*reset the data iter every epoch*/
                    trainIter.Reset();
                    while (trainIter.Next())
                    {
                        var batch = trainIter.GetDataBatch();
                        Logging.LG($"{trainIter.GetDataBatch().Index.Length}");
                        /*use copyto to feed new data and label to the executor*/
                        batch.Data.CopyTo(argmentDictionary["data"]);
                        batch.Label.CopyTo(argmentDictionary["label"]);
                        exec.Forward(true);
                        exec.Backward();
                        for (var i = 0; i < argNames.Count; ++i)
                        {
                            if (argNames[i] == "data" || argNames[i] == "label")
                            {
                                continue;
                            }
                            opt.Update(i, exec.ArgmentArrays[i], exec.GradientArrays[i]);
                        }

                        NDArray.WaitAll();
                        accuracyTrain.Update(batch.Label, exec.Outputs[0]);
                    }
                    Logging.LG($"ITER: {iter} Train Accuracy: {accuracyTrain.Get()}");

                    Logging.LG($"Val Epoch: {iter}");
                    accuracyVal.Reset();
                    valIter.Reset();
                    loglossVal.Reset();
                    while (valIter.Next())
                    {
                        var batch = valIter.GetDataBatch();
                        Logging.LG($"{valIter.GetDataBatch().Index.Length}");
                        batch.Data.CopyTo(argmentDictionary["data"]);
                        batch.Label.CopyTo(argmentDictionary["label"]);
                        exec.Forward(false);
                        NDArray.WaitAll();
                        accuracyVal.Update(batch.Label, exec.Outputs[0]);
                        loglossVal.Update(batch.Label, exec.Outputs[0]);
                    }
                    Logging.LG($"ITER: {iter} Val Accuracy: {accuracyVal.Get()}");
                    Logging.LG($"ITER: {iter} Val LogLoss: {loglossVal.Get()}");

                    /*save the parameters*/
                    var savePathParam = $"./model/alex_param_{iter}";

                    // BUGFIX: build a fresh dictionary instead of pruning argmentDictionary.
                    // The original did `saveArgs = argmentDictionary` (a reference copy) and
                    // then Remove("data")/Remove("label"), mutating the executor's own
                    // argument dictionary and breaking the CopyTo feeds from epoch 1 onwards.
                    var saveArgs = new Dictionary<string, NDArray>();
                    foreach (var pair in argmentDictionary)
                    {
                        /*we do not want to save the data and label*/
                        if (pair.Key != "data" && pair.Key != "label")
                        {
                            saveArgs.Add(pair.Key, pair.Value);
                        }
                    }

                    /*the alexnet does not get any aux array, so we do not need to save
                     * aux_map*/
                    Logging.LG($"ITER: {iter} Saving to...{savePathParam}");
                    NDArray.Save(savePathParam, saveArgs);
                }
                /*the executor is released by the enclosing using block*/
            }

            MXNet.MXNotifyShutdown();
        }
示例#6
0
        private static void Main()
        {
            // Train LeNet on MNIST and report train/validation accuracy for each epoch.

            /*setup basic configs*/
            const int   W            = 28;
            const int   H            = 28;
            const int   batchSize    = 128;
            const int   maxEpoch     = 100;
            const float learningRate = 1e-4f;
            const float weightDecay  = 1e-4f;

            var context = Context.Gpu();

            var lenet   = LenetSymbol();
            var argsMap = new SortedDictionary<string, NDArray>();

            // Declare input shapes so the remaining parameter shapes can be inferred.
            argsMap["data"]       = new NDArray(new Shape(batchSize, 1, W, H), context);
            argsMap["data_label"] = new NDArray(new Shape(batchSize), context);
            lenet.InferArgsMap(context, argsMap, argsMap);

            // Explicit initialization: Gaussian for fc1 weights, zeros for fc2 bias.
            argsMap["fc1_w"] = new NDArray(new Shape(500, 4 * 4 * 50), context);
            NDArray.SampleGaussian(0, 1, argsMap["fc1_w"]);
            argsMap["fc2_b"] = new NDArray(new Shape(10), context);
            argsMap["fc2_b"].Set(0);

            var trainIter = new MXDataIter("MNISTIter")
                            .SetParam("image", "./mnist_data/train-images-idx3-ubyte")
                            .SetParam("label", "./mnist_data/train-labels-idx1-ubyte")
                            .SetParam("batch_size", batchSize)
                            .SetParam("shuffle", 1)
                            .SetParam("flat", 0)
                            .CreateDataIter();
            // NOTE(review): no "batch_size" is set on the validation iterator; if MNISTIter's
            // default batch size differs from batchSize the bound shapes will not match —
            // confirm against MNISTIter defaults before relying on this.
            var valIter = new MXDataIter("MNISTIter")
                          .SetParam("image", "./mnist_data/t10k-images-idx3-ubyte")
                          .SetParam("label", "./mnist_data/t10k-labels-idx1-ubyte")
                          .CreateDataIter();

            var opt = OptimizerRegistry.Find("ccsgd");

            opt.SetParam("momentum", 0.9)
            .SetParam("rescale_grad", 1.0)
            .SetParam("clip_gradient", 10)
            .SetParam("lr", learningRate)
            .SetParam("wd", weightDecay);

            using (var exec = lenet.SimpleBind(context, argsMap))
            {
                var argNames = lenet.ListArguments();

                // Create metrics
                var trainAccuracy = new Accuracy();
                var valAccuracy   = new Accuracy();

                var sw = new Stopwatch();
                for (var iter = 0; iter < maxEpoch; ++iter)
                {
                    var samples = 0;
                    trainIter.Reset();
                    trainAccuracy.Reset();

                    sw.Restart();

                    while (trainIter.Next())
                    {
                        samples += batchSize;
                        var dataBatch = trainIter.GetDataBatch();

                        dataBatch.Data.CopyTo(argsMap["data"]);
                        dataBatch.Label.CopyTo(argsMap["data_label"]);
                        NDArray.WaitAll();

                        // Compute gradients
                        exec.Forward(true);
                        exec.Backward();

                        // Update parameters; the input arrays are not learnable, skip them.
                        for (var i = 0; i < argNames.Count; ++i)
                        {
                            if (argNames[i] == "data" || argNames[i] == "data_label")
                            {
                                continue;
                            }
                            opt.Update(i, exec.ArgmentArrays[i], exec.GradientArrays[i]);
                        }

                        // Update metric
                        trainAccuracy.Update(dataBatch.Label, exec.Outputs[0]);
                    }

                    // one epoch of training is finished
                    sw.Stop();
                    var duration = sw.ElapsedMilliseconds / 1000.0;
                    Logging.LG($"Epoch[{iter}] {samples / duration} samples/sec Train-Accuracy={trainAccuracy.Get()}");

                    // Validation pass. CLEANUP: the original reset valIter twice and updated
                    // a second, unused Accuracy instance; both redundancies are removed
                    // (logged output is unchanged).
                    valIter.Reset();
                    valAccuracy.Reset();
                    while (valIter.Next())
                    {
                        var dataBatch = valIter.GetDataBatch();
                        dataBatch.Data.CopyTo(argsMap["data"]);
                        dataBatch.Label.CopyTo(argsMap["data_label"]);
                        NDArray.WaitAll();

                        // Only forward pass is enough as no gradient is needed when evaluating
                        exec.Forward(false);
                        NDArray.WaitAll();
                        valAccuracy.Update(dataBatch.Label, exec.Outputs[0]);
                    }

                    Logging.LG($"Epoch[{iter}] Val-Accuracy={valAccuracy.Get()}");
                }
            }

            MXNet.MXNotifyShutdown();
        }
示例#7
0
        private static void Main(string[] args)
        {
            // Minimum validation accuracy required for a "success" (0) result.
            //var minScore = float.Parse(args[0], NumberStyles.Float, null);
            var minScore = 0.9f;

            const int   imageSize    = 28;
            const int   batchSize    = 100;
            const int   maxEpoch     = 10;
            const float learningRate = 0.1f;
            const float weightDecay  = 1e-2f;

            var layers = new[] { 128, 64, 10 };

            var trainIter = new MXDataIter("MNISTIter")
                            .SetParam("image", "./mnist_data/train-images-idx3-ubyte")
                            .SetParam("label", "./mnist_data/train-labels-idx1-ubyte")
                            .SetParam("batch_size", batchSize)
                            .SetParam("flat", 1)
                            .CreateDataIter();
            var valIter = new MXDataIter("MNISTIter")
                          .SetParam("image", "./mnist_data/t10k-images-idx3-ubyte")
                          .SetParam("label", "./mnist_data/t10k-labels-idx1-ubyte")
                          .SetParam("batch_size", batchSize)
                          .SetParam("flat", 1)
                          .CreateDataIter();

            var net = Mlp(layers);

            var ctx = Context.Cpu();  // training runs on the CPU context

            // Declare input shapes; MXNet infers the remaining parameter shapes.
            var paramMap = new Dictionary<string, NDArray>
            {
                ["X"]     = new NDArray(new Shape(batchSize, imageSize * imageSize), ctx),
                ["label"] = new NDArray(new Shape(batchSize), ctx),
            };
            net.InferArgsMap(ctx, paramMap, paramMap);

            // Initialize all parameters with uniform distribution U(-0.01, 0.01).
            var uniform = new Uniform(0.01f);
            foreach (var pair in paramMap)
            {
                // pair.Key is the parameter name, pair.Value the backing array.
                uniform.Operator(pair.Key, pair.Value);
            }

            // SGD optimizer with a step-decay learning-rate schedule.
            var opt = OptimizerRegistry.Find("sgd");
            opt.SetParam("rescale_grad", 1.0 / batchSize)
               .SetParam("lr", learningRate)
               .SetParam("wd", weightDecay);

            var scheduler = new UniquePtr<LRScheduler>(new FactorScheduler(5000, 0.1f));
            opt.SetLearningRateScheduler(scheduler);

            // Bind parameters to the model and train.
            using (var exec = net.SimpleBind(ctx, paramMap))
            {
                var argNames = net.ListArguments();

                float finalScore = 0;

                var stopwatch = new Stopwatch();
                for (var epoch = 0; epoch < maxEpoch; ++epoch)
                {
                    var sampleCount = 0;
                    trainIter.Reset();

                    stopwatch.Restart();
                    while (trainIter.Next())
                    {
                        sampleCount += batchSize;
                        var batch = trainIter.GetDataBatch();
                        // Batches from the DataIter sit in host memory; copy into the bound arrays.
                        batch.Data.CopyTo(paramMap["X"]);
                        batch.Label.CopyTo(paramMap["label"]);
                        // CopyTo is imperative — block until the copies complete.
                        NDArray.WaitAll();

                        // Forward + backward to compute gradients.
                        exec.Forward(true);
                        exec.Backward();

                        // Apply SGD to every learnable parameter; inputs are skipped.
                        for (var i = 0; i < argNames.Count; ++i)
                        {
                            var name = argNames[i];
                            if (name != "X" && name != "label")
                            {
                                opt.Update(i, exec.ArgmentArrays[i], exec.GradientArrays[i]);
                            }
                        }
                    }

                    stopwatch.Stop();

                    // Evaluate on the validation set; no gradients needed, forward only.
                    var acc = new Accuracy();
                    valIter.Reset();
                    while (valIter.Next())
                    {
                        var batch = valIter.GetDataBatch();
                        batch.Data.CopyTo(paramMap["X"]);
                        batch.Label.CopyTo(paramMap["label"]);
                        NDArray.WaitAll();
                        exec.Forward(false);
                        acc.Update(batch.Label, exec.Outputs[0]);
                    }

                    var seconds = stopwatch.ElapsedMilliseconds / 1000.0;
                    Logging.LG($"Epoch: {epoch} {sampleCount / seconds} samples/sec Accuracy: {acc.Get()}");
                    finalScore = acc.Get();
                }

                MXNet.MXNotifyShutdown();

                // Emit 0 when the last epoch's accuracy met the threshold, else 1.
                var ret = finalScore >= minScore ? 0 : 1;
                Console.WriteLine($"{ret}");
            }
        }
示例#8
0
        /// <summary>
        /// Runs the loaded predictor on a single image file and returns the raw values of the
        /// network's first output tensor, flattened.
        /// </summary>
        /// <param name="img">Path of the image file to run inference on.</param>
        /// <returns>The flattened first output of the network.</returns>
        /// <exception cref="FileNotFoundException">Thrown when <paramref name="img"/> does not exist.</exception>
        /// <exception cref="ArgumentException">Thrown when the image cannot be decoded or is not 3-channel.</exception>
        public float[] Detect(string img)
        {
            if (!File.Exists(img))
            {
                throw new FileNotFoundException($"{img} is not found.");
            }

            using (var image = Cv2.ImRead(img))
            {
                if (image.Empty())
                {
                    throw new ArgumentException($"Unable to load image file: {img}");
                }

                if (image.Channels() != 3)
                {
                    throw new ArgumentException($"RGB image required");
                }

                // Scale to the network's fixed input resolution.
                using (var resized = image.Resize(new Size(this._Width, this._Height)))
                {
                    var size   = resized.Channels() * resized.Rows * resized.Cols;
                    var inData = new float[size];

                    // de-interleave and minus means: convert interleaved HWC bytes into
                    // planar (channel-major) floats, subtracting the per-channel mean.
                    // NOTE(review): assumes the resized Mat is continuous in memory (no row
                    // padding). Cv2.ImRead normally yields BGR channel order, yet channel 0
                    // is paired with _MeanR here — confirm the mean values match this order.
                    unsafe
                    {
                        var ptr = (byte *)resized.Ptr();
                        fixed(float *dataPtr = &inData[0])
                        {
                            var tmp = dataPtr;

                            // Plane for channel 0 (every 3rd byte starting at offset 0).
                            for (var i = 0; i < size; i += 3)
                            {
                                *tmp = ptr[i] - this._MeanR;
                                tmp++;
                            }

                            // Plane for channel 1 (offset 1).
                            for (var i = 1; i < size; i += 3)
                            {
                                *tmp = ptr[i] - this._MeanG;
                                tmp++;
                            }

                            // Plane for channel 2 (offset 2).
                            for (var i = 2; i < size; i += 3)
                            {
                                *tmp = ptr[i] - this._MeanB;
                                tmp++;
                            }
                        }
                    }

                    // Use the model to run a forward pass on the prepared input.
                    MXNet.MXPredSetInput(this._Predictor, "data", inData, (uint)size);

                    // Time the forward pass plus output retrieval.
                    var sw = new Stopwatch();
                    sw.Start();
                    MXNet.MXPredForward(this._Predictor);
                    MXNet.MXPredGetOutputShape(this._Predictor, 0, out var shape, out var shapeLen);

                    // Total element count of output 0 (product of its dimensions).
                    var ttSize = 1u;
                    for (var i = 0u; i < shapeLen; ++i)
                    {
                        ttSize *= shape[i];
                    }

                    var outputs = new float[ttSize];
                    MXNet.MXPredGetOutput(this._Predictor, 0, outputs, ttSize);
                    sw.Stop();
                    Console.WriteLine($"Forward elapsed time: {sw.ElapsedMilliseconds} ms");

                    return(outputs);
                }
            }
        }
示例#9
0
        /// <summary>
        /// Trains GoogleNet on Caltech-101 record files (train.rec / val.rec) for
        /// <c>maxEpoch</c> epochs with plain SGD-with-momentum, logging the
        /// validation accuracy after every epoch.
        /// </summary>
        private static void Main()
        {
            const uint  batchSize    = 50;
            const uint  maxEpoch     = 100;
            const float learningRate = 1e-4f;
            const float weightDecay  = 1e-4f;

            var googlenet = GoogleNetSymbol(101 + 1); // +1 is BACKGROUND_Google
            var argsMap   = new Dictionary<string, NDArray>();

            // change device type if you want to use GPU
            var context = Context.Cpu();

            argsMap["data"]       = new NDArray(new Shape(batchSize, 3, 256, 256), context);
            argsMap["data_label"] = new NDArray(new Shape(batchSize), context);

            // FIX: use the selected `context` instead of a hard-coded Context.Cpu()
            // (here and in SimpleBind below) — the original silently ignored the
            // device chosen above, so switching to GPU had no effect.
            googlenet.InferArgsMap(context, argsMap, argsMap);

            var trainIter = new MXDataIter("ImageRecordIter")
                            .SetParam("path_imglist", "train.lst")
                            .SetParam("path_imgrec", "train.rec")
                            .SetParam("data_shape", new Shape(3, 256, 256))
                            .SetParam("batch_size", batchSize)
                            .SetParam("shuffle", 1)
                            .CreateDataIter();

            var valIter = new MXDataIter("ImageRecordIter")
                          .SetParam("path_imglist", "val.lst")
                          .SetParam("path_imgrec", "val.rec")
                          .SetParam("data_shape", new Shape(3, 256, 256))
                          .SetParam("batch_size", batchSize)
                          .CreateDataIter();

            var opt = OptimizerRegistry.Find("ccsgd");

            // rescale_grad normalizes the gradient by the batch size so the
            // learning rate does not depend on batchSize.
            opt.SetParam("momentum", 0.9)
            .SetParam("rescale_grad", 1.0 / batchSize)
            .SetParam("clip_gradient", 10)
            .SetParam("lr", learningRate)
            .SetParam("wd", weightDecay);

            using (var exec = googlenet.SimpleBind(context, argsMap))
            {
                var argNames = googlenet.ListArguments();

                for (var iter = 0; iter < maxEpoch; ++iter)
                {
                    Logging.LG($"Epoch: {iter}");

                    trainIter.Reset();
                    while (trainIter.Next())
                    {
                        var dataBatch = trainIter.GetDataBatch();
                        dataBatch.Data.CopyTo(argsMap["data"]);
                        dataBatch.Label.CopyTo(argsMap["data_label"]);
                        // Ensure the async copies have landed before the forward pass.
                        NDArray.WaitAll();
                        exec.Forward(true);
                        exec.Backward();
                        // Update every learnable argument; skip the input and label
                        // placeholders, which have no gradients to apply.
                        for (var i = 0; i < argNames.Count; ++i)
                        {
                            if (argNames[i] == "data" || argNames[i] == "data_label")
                            {
                                continue;
                            }

                            var weight = exec.ArgmentArrays[i];
                            var grad   = exec.GradientArrays[i];
                            opt.Update(i, weight, grad);
                        }
                    }

                    // Fresh accuracy accumulator per epoch.
                    var acu = new Accuracy();
                    valIter.Reset();
                    while (valIter.Next())
                    {
                        var dataBatch = valIter.GetDataBatch();
                        dataBatch.Data.CopyTo(argsMap["data"]);
                        dataBatch.Label.CopyTo(argsMap["data_label"]);
                        NDArray.WaitAll();
                        // Inference only — no gradients needed for evaluation.
                        exec.Forward(false);
                        NDArray.WaitAll();
                        acu.Update(dataBatch.Label, exec.Outputs[0]);
                    }

                    Logging.LG($"Accuracy: {acu.Get()}");
                }
            }

            MXNet.MXNotifyShutdown();
        }
示例#10
0
        /// <summary>
        /// Trains the compiled model on <paramref name="train"/> for the given number
        /// of epochs, optionally evaluating on <paramref name="validation"/> after
        /// each epoch, and logs throughput plus metric values per epoch.
        /// </summary>
        /// <param name="train">Iterator over the training set.</param>
        /// <param name="epochs">Number of full passes over the training data.</param>
        /// <param name="batchSize">Mini-batch size applied to both iterators.</param>
        /// <param name="validation">Optional iterator over the validation set; skipped when null.</param>
        /// <param name="shuffle">Currently unused; kept for interface compatibility.</param>
        public void Fit(DataIter train, uint epochs = 1, uint batchSize = 32, DataIter validation = null, bool shuffle = false)
        {
            var    args      = new SortedDictionary<string, NDArray>();
            string labelName = "label";
            var    label     = Symbol.Variable(labelName);

            args["X"]       = new NDArray(new Shape(batchSize, (uint)InputShape[0]));
            args[labelName] = new NDArray(new Shape(batchSize, (uint)OutputShape.Size));

            CompiledModel.InferArgsMap(GlobalParam.Device, args, args);

            // Initialize every argument array (weights, biases, placeholders).
            var initializer = new SiaDNN.Initializers.GlorotUniform();

            foreach (var arg in args)
            {
                initializer.Operator(arg.Key, arg.Value);
            }

            // Normalize gradients by batch size so the learning rate is batch-independent.
            ModelOptimizer.SetParam("rescale_grad", 1.0 / batchSize);

            using (var exec = CompiledModel.SimpleBind(GlobalParam.Device, args))
            {
                var argNames = CompiledModel.ListArguments();

                // Start training
                var sw = new Stopwatch();
                for (var iter = 0; iter < epochs; ++iter)
                {
                    uint samples = 0;
                    train.BatchSize = batchSize;
                    train.Reset();

                    sw.Restart();

                    while (train.Next())
                    {
                        samples += batchSize;
                        var dataBatch = train.GetDataBatch();
                        // Set data and label
                        dataBatch.Data.CopyTo(args["X"]);
                        dataBatch.Label.CopyTo(args[labelName]);

                        // Compute gradients
                        exec.Forward(true);
                        exec.Backward();
                        // Update parameters; skip the input and label placeholders.
                        for (var i = 0; i < argNames.Count; ++i)
                        {
                            if (argNames[i] == "X" || argNames[i] == labelName)
                            {
                                continue;
                            }

                            ModelOptimizer.Update(i, exec.ArgmentArrays[i], exec.GradientArrays[i]);
                        }

                        Metric.Update(dataBatch.Label, exec.Outputs[0]);
                    }

                    sw.Stop();

                    // FIX: snapshot the training metric BEFORE the validation loop.
                    // The original logged Metric.Get() for both Train_Metric and
                    // Val_Metric after validation had already updated the shared
                    // Metric, so both columns always showed the same value.
                    var trainMetric = Metric.Get();

                    if (validation != null)
                    {
                        validation.BatchSize = batchSize;
                        validation.Reset();
                        while (validation.Next())
                        {
                            var dataBatch = validation.GetDataBatch();
                            dataBatch.Data.CopyTo(args["X"]);
                            dataBatch.Label.CopyTo(args[labelName]);
                            // Forward pass is enough as no gradient is needed when evaluating
                            exec.Forward(false);
                            Metric.Update(dataBatch.Label, exec.Outputs[0]);
                        }
                    }


                    var duration = sw.ElapsedMilliseconds / 1000.0;
                    if (validation == null)
                    {
                        Logging.LG($"Epoch: {iter} {samples / duration} samples/sec Train_Metric: {trainMetric}");
                    }
                    else
                    {
                        // NOTE(review): Metric is shared between the train and validation
                        // loops and is never reset in between, so Val_Metric still includes
                        // the training batches — confirm whether the Metric type exposes a
                        // Reset() and call it before the validation loop if so.
                        Logging.LG($"Epoch: {iter} {samples / duration} samples/sec, Train_Metric: {trainMetric},  Val_Metric: {Metric.Get()}");
                    }
                }
            }

            MXNet.MXNotifyShutdown();
        }
示例#11
0
 /// <summary>
 /// Entry point: runs the MLP example, then releases the native MXNet engine.
 /// </summary>
 private static void Main()
 {
     try
     {
         Mlp();
     }
     finally
     {
         // FIX: shut down the native engine even if Mlp() throws, so MXNet's
         // background resources are not left alive on an unhandled exception.
         MXNet.MXNotifyShutdown();
     }
 }