Example #1
        public void ExecuteMethod()
        {
            // draws batches of 50 indexes from a shuffled pool of 101 samples
            ShuffleIterator iterator = new ShuffleIterator(50, 101, new Random());

            int[] indexes1 = iterator.Next();
            int[] indexes2 = iterator.Next();
            int[] indexes3 = iterator.Next();
            int[] indexes4 = iterator.Next();
            int[] indexes5 = iterator.Next();
            int[] indexes6 = iterator.Next();
            int[] indexes7 = iterator.Next();
            int[] indexes8 = iterator.Next();

            // 8 calls x 50 indexes = 400 indexes consumed, i.e. 3 complete passes over the 101 samples
            Assert.AreEqual(50, iterator.NumBatches);
            Assert.AreEqual(101, iterator.Counts);
            Assert.AreEqual(3, iterator.Epoch);
            Assert.AreEqual(8, iterator.Iteration);

            // successive batches are expected to contain different index sets
            CollectionAssert.AreNotEquivalent(indexes1, indexes2);
            CollectionAssert.AreNotEquivalent(indexes3, indexes4);
        }
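
The test above pins down the ShuffleIterator contract: each Next() call returns NumBatches indexes drawn from a reshuffled permutation of [0, Counts), Iteration counts Next() calls, and Epoch counts completed passes over the data (8 x 50 = 400 indexes from a pool of 101 gives 3 complete epochs). Below is a minimal sketch of a class satisfying that contract; it is an illustrative assumption, not the library's actual implementation.

using System;
using System.Linq;

public class ShuffleIterator
{
    private readonly Random random;
    private readonly int[] order;
    private int pos;

    public int NumBatches { get; }
    public int Counts { get; }
    public int Epoch { get; private set; }
    public int Iteration { get; private set; }

    public ShuffleIterator(int num_batches, int counts, Random random)
    {
        NumBatches = num_batches;
        Counts = counts;
        this.random = random;

        // permutation of all sample indexes, reshuffled once per epoch
        order = Enumerable.Range(0, counts).ToArray();
        Shuffle();
    }

    public int[] Next()
    {
        int[] indexes = new int[NumBatches];

        for (int i = 0; i < NumBatches; i++)
        {
            if (pos >= Counts)
            {
                // one full pass over the data set is finished
                Epoch++;
                pos = 0;
                Shuffle();
            }
            indexes[i] = order[pos];
            pos++;
        }

        Iteration++;
        return indexes;
    }

    private void Shuffle()
    {
        // Fisher-Yates shuffle of the index permutation
        for (int i = order.Length - 1; i > 0; i--)
        {
            int j = random.Next(i + 1);
            (order[i], order[j]) = (order[j], order[i]);
        }
    }
}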
Example #2
        static void Main()
        {
            const string dirpath_dataset = "mnist_dataset";
            const string dirpath_result  = "result";
            const int    classes         = 10;

            Console.WriteLine("Download mnist...");
            MnistDownloader.Download(dirpath_dataset);

            Console.WriteLine("Setup loader...");
            Random random = new Random(1234);

            MnistLoader loader         = new MnistLoader(dirpath_dataset, num_batches: 1000);
            Iterator    train_iterator = new ShuffleIterator(loader.NumBatches, loader.CountTrainDatas, random);
            Iterator    test_iterator  = new ShuffleIterator(loader.NumBatches, loader.CountTestDatas, random);

            Console.WriteLine("Create input tensor...");
            VariableField x = new Tensor(loader.BatchShape);                  // image batch
            VariableField t = new Tensor(Shape.Vector(loader.NumBatches));    // one class label per sample

            Console.WriteLine("Build model...");
            Field      y = CNN.Forward(x, classes);
            Field      acc = Accuracy(y, t);
            Field      err = Sum(SoftmaxCrossEntropy(y, OneHotVector(t, classes)), axes: new int[] { Axis.Map0D.Channels });
            StoreField accnode = acc.Save(), lossnode = Average(err).Save();

            Console.WriteLine("Build optimize flow...");
            (Flow trainflow, Parameters parameters) = Flow.Optimize(err);

            Console.WriteLine("Initialize params...");
            parameters
                .Where((parameter) => parameter.Category == ParameterCategory.Kernel)
                .InitializeTensor((tensor) => new HeNormal(tensor, random));
            parameters
                .Where((parameter) => parameter.Category == ParameterCategory.Bias)
                .InitializeTensor((tensor) => new Zero(tensor));

            Console.WriteLine("Set params updater...");
            parameters.AddUpdater((parameter) => new Nadam(parameter, alpha: 0.01f));   // Nadam optimizer
            parameters.AddUpdater((parameter) => new Ridge(parameter, decay: 1e-4f));   // L2 (ridge) weight decay

            Console.WriteLine("Training...");
            Train(train_iterator, loader, x, t, accnode, lossnode, trainflow, parameters);

            Console.WriteLine("Build inference flow...");
            Flow testflow = Flow.Inference(accnode);

            Console.WriteLine("Testing...");
            Test(test_iterator, loader, x, t, testflow, accnode);

            Console.WriteLine("Saving snapshot...");
            Snapshot      snapshot = parameters.Save();
            SnapshotSaver saver    = new ZippedBinaryShapshotSaver();

            if (!Directory.Exists(dirpath_result))
            {
                Directory.CreateDirectory(dirpath_result);
            }
            saver.Save($"{dirpath_result}/mnist.tss", snapshot);

            Console.WriteLine("END");
            Console.Read();
        }
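
Main above delegates the actual loops to Train and Test, which are not shown in this example. The following is a hedged sketch of what those helpers might look like; the loader accessors (GetTrainImages, GetTrainLabels, GetTestImages, GetTestLabels), the State properties, Flow.Execute(), Parameters.Update(), and the epoch budget are all assumptions made for illustration, not the sample's real API.

        static void Train(Iterator iterator, MnistLoader loader,
                          VariableField x, VariableField t,
                          StoreField accnode, StoreField lossnode,
                          Flow flow, Parameters parameters)
        {
            const int epochs = 10;  // assumed training budget

            while (iterator.Epoch < epochs)
            {
                int[] indexes = iterator.Next();

                // assumed accessors: feed one batch of images and labels into the graph
                x.State = loader.GetTrainImages(indexes);
                t.State = loader.GetTrainLabels(indexes);

                flow.Execute();        // forward + backward pass (assumed API)
                parameters.Update();   // apply the Nadam / Ridge updaters (assumed API)

                if (iterator.Iteration % 100 == 0)
                {
                    Console.WriteLine($"epoch {iterator.Epoch} iter {iterator.Iteration}: " +
                                      $"acc {accnode.State}, loss {lossnode.State}");
                }
            }
        }

        static void Test(Iterator iterator, MnistLoader loader,
                         VariableField x, VariableField t,
                         Flow flow, StoreField accnode)
        {
            // a single pass over the test set
            while (iterator.Epoch < 1)
            {
                int[] indexes = iterator.Next();

                x.State = loader.GetTestImages(indexes);   // assumed accessors
                t.State = loader.GetTestLabels(indexes);

                flow.Execute();  // inference flow built from accnode

                Console.WriteLine($"test batch {iterator.Iteration}: acc {accnode.State}");
            }
        }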