Example #1
        public override void DoWork(string[] args)
        {
            var folder = args[1];

            var model = ModelIO.Load(folder);

            DoWork(model);
            ModelIO.Save(model);
        }
Example #2
        public override void DoWork(string[] args)
        {
            if (args.Length < 3)
            {
                throw (new ArgumentException(String.Format("Insufficient args")));
            }
            var           folder = args[1];
            AssemblerMode mode;

            if (!Enum.TryParse(args[2], true, out mode))
            {
                throw (new ArgumentException(String.Format("Unknown mode: {0}", args[2])));
            }
            var print = mode == AssemblerMode.Print;

            var model = ModelIO.Load(folder);

            DoWork(model, print);
            // ModelIO.Save(model);
        }
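
Example #2 leans on the `Enum.TryParse` overload with `ignoreCase: true` to map the command-line string onto an `AssemblerMode` value. A minimal standalone sketch of that behavior (the `AssemblerMode` members here are assumed for illustration; only `Print` is implied by the example):

        enum AssemblerMode { Print, Assemble } // "Assemble" is a hypothetical second member

        static void TryParseSketch()
        {
            // ignoreCase: true lets "print", "PRINT", and "Print" all parse.
            if (Enum.TryParse("PRINT", true, out AssemblerMode mode))
            {
                Console.WriteLine(mode); // Print
            }

            // Unknown names fail quietly (return false) instead of throwing,
            // which is why the example raises its own ArgumentException.
            Console.WriteLine(Enum.TryParse("bogus", true, out AssemblerMode _)); // False
        }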
Example #3
        static void Main(string[] args)
        {
            var path = ModelIO.DebugSubdir("debug\\20");

            Directory.CreateDirectory(path);
            File.Delete(path + "\\montage.v3");
            var model = ModelIO.Load(path);

            // Alternate each 3000-unit chunk between Mode.Face and Mode.Screen.
            for (int i = 0; i < 1000; i++)
            {
                model.SetChunkMode(i * 3000, (i % 2 == 0) ? Mode.Face : Mode.Screen, false);
            }
            // Split each 3000-unit chunk into a 500-unit silent interval followed by a voiced one.
            for (int i = 0; i < 1000; i++)
            {
                model.Montage.Intervals.Add(new Interval {
                    StartTime = i * 3000, EndTime = i * 3000 + 500, HasVoice = false
                });
                model.Montage.Intervals.Add(new Interval {
                    StartTime = i * 3000 + 500, EndTime = i * 3000 + 3000, HasVoice = true
                });
            }
            ModelIO.Save(model);
        }
Example #4
        public static void Run()
        {
            //Number of training iterations
            const int learningCount = 10000;

            //Training data
            Real[][] trainData =
            {
                new Real[] { 0, 0 },
                new Real[] { 1, 0 },
                new Real[] { 0, 1 },
                new Real[] { 1, 1 }
            };

            //Training data labels
            Real[][] trainLabel =
            {
                new Real[] { 0 },
                new Real[] { 1 },
                new Real[] { 1 },
                new Real[] { 0 }
            };

            //Network configuration is written in FunctionStack
            FunctionStack nn = new FunctionStack(
                new Linear(2, 2, name: "l1 Linear"),
                new Sigmoid(name: "l1 Sigmoid"),
                new Linear(2, 2, name: "l2 Linear")
                );

            //Declare the optimizer
            nn.SetOptimizer(new MomentumSGD());

            //Training loop
            Console.WriteLine("Training...");
            for (int i = 0; i < learningCount; i++)
            {
                for (int j = 0; j < trainData.Length; j++)
                {
                    //Describe the loss function at training execution
                    Trainer.Train(nn, trainData[j], trainLabel[j], new SoftmaxCrossEntropy());
                }
            }

            //Show training results
            Console.WriteLine("Test Start...");
            foreach (Real[] input in trainData)
            {
                NdArray result      = nn.Predict(input)[0];
                int     resultIndex = Array.IndexOf(result.Data, result.Data.Max());
                Console.WriteLine(input[0] + " xor " + input[1] + " = " + resultIndex + " " + result);
            }

            //Save the network after learning
            ModelIO.Save(nn, "test.nn");

            //Load the network after learning
            Function testnn = ModelIO.Load("test.nn");

            Console.WriteLine("Test Start...");
            foreach (Real[] input in trainData)
            {
                NdArray result      = testnn.Predict(input)[0];
                int     resultIndex = Array.IndexOf(result.Data, result.Data.Max());
                Console.WriteLine(input[0] + " xor " + input[1] + " = " + resultIndex + " " + result);
            }
        }
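
Because the network has two output neurons trained with `SoftmaxCrossEntropy`, XOR is treated as a two-class classification problem: the label 0 or 1 is a class index, and the predicted bit is the index of the larger output. The readout idiom in isolation (a plain `double[]` standing in for `result.Data`; `Max()` comes from System.Linq, as above):

        static void ArgMaxSketch()
        {
            // Stand-in for result.Data: two class scores, class 1 wins.
            double[] scores = { 0.2, 1.7 };

            // Index of the maximum element, exactly as in the test loop above.
            int predicted = Array.IndexOf(scores, scores.Max());

            Console.WriteLine(predicted); // 1
        }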
Example #5
        public static void Run()
        {
            RILogManager.Default?.SendDebug("MNIST Data Loading...");
            MnistData mnistData = new MnistData(28);

            RILogManager.Default?.SendDebug("Training Start...");

            int           neuronCount = 28;
            FunctionStack nn          = new FunctionStack("Test19",
                                                          new Linear(true, neuronCount * neuronCount, N, name: "l1 Linear"), // L1
                                                          new BatchNormalization(true, N, name: "l1 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l1 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l2 Linear"), // L2
                                                          new BatchNormalization(true, N, name: "l2 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l2 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l3 Linear"), // L3
                                                          new BatchNormalization(true, N, name: "l3 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l3 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l4 Linear"), // L4
                                                          new BatchNormalization(true, N, name: "l4 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l4 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l5 Linear"), // L5
                                                          new BatchNormalization(true, N, name: "l5 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l5 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l6 Linear"), // L6
                                                          new BatchNormalization(true, N, name: "l6 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l6 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l7 Linear"), // L7
                                                          new BatchNormalization(true, N, name: "l7 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l7 ReLU"),
                                                          new Linear(true, N, N, name: "l8 Linear"), // L8
                                                          new BatchNormalization(true, N, name: "l8 BatchNorm"),
                                                          new LeakyReLU(slope: 0.000001, name: "l8 LeakyReLU"),
                                                          new Linear(true, N, N, name: "l9 Linear"), // L9
                                                          new BatchNormalization(true, N, name: "l9 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l9 PolynomialApproximantSteep"),
                                                          new Linear(true, N, N, name: "l10 Linear"), // L10
                                                          new BatchNormalization(true, N, name: "l10 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l10 PolynomialApproximantSteep"),
                                                          new Linear(true, N, N, name: "l11 Linear"), // L11
                                                          new BatchNormalization(true, N, name: "l11 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l11 PolynomialApproximantSteep"),
                                                          new Linear(true, N, N, name: "l12 Linear"), // L12
                                                          new BatchNormalization(true, N, name: "l12 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l12 PolynomialApproximantSteep"),
                                                          new Linear(true, N, N, name: "l13 Linear"), // L13
                                                          new BatchNormalization(true, N, name: "l13 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l13 PolynomialApproximantSteep"),
                                                          new Linear(true, N, N, name: "l14 Linear"), // L14
                                                          new BatchNormalization(true, N, name: "l14 BatchNorm"),
                                                          new PolynomialApproximantSteep(slope: 0.000001, name: "l14 PolynomialApproximantSteep"),
                                                          new Linear(true, N, 10, name: "l15 Linear") // L15
                                                          );


            nn.SetOptimizer(new AdaGrad());
            //nn.SetOptimizer(new Adam());

            RunningStatistics stats             = new RunningStatistics();
            Histogram         lossHistogram     = new Histogram();
            Histogram         accuracyHistogram = new Histogram();
            Real totalLoss        = 0;
            long totalLossCounter = 0;
            Real highestAccuracy  = 0;
            Real bestLocalLoss    = double.MaxValue; // start high so the first finite loss becomes the best
            Real bestTotalLoss    = double.MaxValue;

            // First skeleton save
            ModelIO.Save(nn, nn.Name);

            for (int epoch = 0; epoch < 1; epoch++)
            {
                RILogManager.Default?.SendDebug("epoch " + (epoch + 1));
                RILogManager.Default?.ViewerSendWatch("epoch", (epoch + 1));

                for (int i = 1; i < TRAIN_DATA_COUNT + 1; i++)
                {
                    RILogManager.Default?.SendInformation("batch count " + i + "/" + TRAIN_DATA_COUNT);

                    TestDataSet datasetX = mnistData.GetRandomXSet(BATCH_DATA_COUNT, 28, 28);

                    Real sumLoss = Trainer.Train(nn, datasetX.Data, datasetX.Label, new SoftmaxCrossEntropy());
                    totalLoss += sumLoss;
                    totalLossCounter++;

                    stats.Push(sumLoss);
                    lossHistogram.AddBucket(new Bucket(-10, 10));
                    accuracyHistogram.AddBucket(new Bucket(-10.0, 10));

                    if (sumLoss < bestLocalLoss && !double.IsNaN(sumLoss))
                    {
                        bestLocalLoss = sumLoss;
                    }
                    if (stats.Mean < bestTotalLoss && !double.IsNaN(sumLoss))
                    {
                        bestTotalLoss = stats.Mean;
                    }

                    try
                    {
                        lossHistogram.AddData(sumLoss);
                    }
                    catch (Exception)
                    {
                    }

                    if (i % 20 == 0)
                    {
                        RILogManager.Default?.SendDebug("\nbatch count " + i + "/" + TRAIN_DATA_COUNT);
                        RILogManager.Default?.SendDebug("Total/Mean loss " + stats.Mean);
                        RILogManager.Default?.SendDebug("local loss " + sumLoss);

                        RILogManager.Default?.SendInformation("batch count " + i + "/" + TRAIN_DATA_COUNT);
                        RILogManager.Default?.ViewerSendWatch("batch count", i);
                        RILogManager.Default?.ViewerSendWatch("Total/Mean loss", stats.Mean);
                        RILogManager.Default?.ViewerSendWatch("local loss", sumLoss);


                        RILogManager.Default?.SendDebug("");

                        RILogManager.Default?.SendDebug("Testing...");
                        TestDataSet datasetY = mnistData.GetRandomYSet(TEST_DATA_COUNT, 28);
                        Real        accuracy = Trainer.Accuracy(nn, datasetY.Data, datasetY.Label);
                        if (accuracy > highestAccuracy)
                        {
                            highestAccuracy = accuracy;
                        }

                        RILogManager.Default?.SendDebug("Accuracy: " + accuracy);
                        RILogManager.Default?.ViewerSendWatch("Accuracy", accuracy);

                        try
                        {
                            accuracyHistogram.AddData(accuracy);
                        }
                        catch (Exception)
                        {
                        }
                    }
                }
            }

            RILogManager.Default?.SendDebug("Best Accuracy: " + highestAccuracy);
            RILogManager.Default?.SendDebug("Best Total Loss " + bestTotalLoss);
            RILogManager.Default?.SendDebug("Best Local Loss " + bestLocalLoss);

            RILogManager.Default?.ViewerSendWatch("Best Accuracy:", highestAccuracy);
            RILogManager.Default?.ViewerSendWatch("Best Total Loss", bestTotalLoss);
            RILogManager.Default?.ViewerSendWatch("Best Local Loss", bestLocalLoss);

            // Save all with training data
            ModelIO.Save(nn, nn.Name);
        }
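
One detail worth noting in the loss bookkeeping above: NaN has to be filtered with `double.IsNaN`, because an equality test such as `sumLoss != Double.NaN` is always true under IEEE 754, where NaN compares unequal to everything, including itself:

        static void NanSketch()
        {
            double loss = double.NaN;
            Console.WriteLine(loss != double.NaN); // True  - NaN is unequal even to itself
            Console.WriteLine(double.IsNaN(loss)); // True  - the reliable check
        }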
Example #6
        public static void Run()
        {
            const int learningCount = 10000;

            Real[][] trainData =
            {
                new Real[] { 0, 0 },
                new Real[] { 1, 0 },
                new Real[] { 0, 1 },
                new Real[] { 1, 1 }
            };

            //Training data labels
            Real[][] trainLabel =
            {
                new Real[] { 0 },
                new Real[] { 1 },
                new Real[] { 1 },
                new Real[] { 0 }
            };

            //Network configuration is written in FunctionStack
            FunctionStack nn = new FunctionStack(
                new Linear(2, 2, name: "l1 Linear"),
                new Sigmoid(name: "l1 Sigmoid"),
                new Linear(2, 2, name: "l2 Linear")
                );

            //optimizer
            nn.SetOptimizer(new MomentumSGD());


            Console.WriteLine("Training...");
            for (int i = 0; i < learningCount; i++)
            {
                for (int j = 0; j < trainData.Length; j++)
                {
                    //Describe the loss function at training execution
                    Trainer.Train(nn, trainData[j], trainLabel[j], new SoftmaxCrossEntropy());
                }
            }

            //Show training results
            Console.WriteLine("Test Start...");
            foreach (Real[] input in trainData)
            {
                NdArray result      = nn.Predict(input)[0];
                int     resultIndex = Array.IndexOf(result.Data, result.Data.Max());
                Console.WriteLine(input[0] + " xor " + input[1] + " = " + resultIndex + " " + result);
            }

            //Save network after learning
            ModelIO.Save(nn, "test.nn");

            //Load the network after learning
            FunctionStack testnn = ModelIO.Load("test.nn");

            Console.WriteLine("Test Start...");
            foreach (Real[] input in trainData)
            {
                NdArray result      = testnn.Predict(input)[0];
                int     resultIndex = Array.IndexOf(result.Data, result.Data.Max());
                Console.WriteLine(input[0] + " xor " + input[1] + " = " + resultIndex + " " + result);
            }
        }
Example #7
        const int N = 30; // The reference link uses 1000, but that is slow on a CPU

        public static void Run()
        {
            RILogManager.Default?.SendDebug("MNIST Data Loading...");
            MnistData mnistData = new MnistData(28);

            RILogManager.Default?.SendDebug("Training Start...");

            //Writing the network configuration in FunctionStack
            FunctionStack nn = new FunctionStack("Test7",
                                                 new Linear(true, 28 * 28, N, name: "l1 Linear"), // L1
                                                 new BatchNormalization(true, N, name: "l1 BatchNorm"),
                                                 new ReLU(name: "l1 ReLU"),
                                                 new Linear(true, N, N, name: "l2 Linear"), // L2
                                                 new BatchNormalization(true, N, name: "l2 BatchNorm"),
                                                 new ReLU(name: "l2 ReLU"),
                                                 new Linear(true, N, N, name: "l3 Linear"), // L3
                                                 new BatchNormalization(true, N, name: "l3 BatchNorm"),
                                                 new ReLU(name: "l3 ReLU"),
                                                 new Linear(true, N, N, name: "l4 Linear"), // L4
                                                 new BatchNormalization(true, N, name: "l4 BatchNorm"),
                                                 new ReLU(name: "l4 ReLU"),
                                                 new Linear(true, N, N, name: "l5 Linear"), // L5
                                                 new BatchNormalization(true, N, name: "l5 BatchNorm"),
                                                 new ReLU(name: "l5 ReLU"),
                                                 new Linear(true, N, N, name: "l6 Linear"), // L6
                                                 new BatchNormalization(true, N, name: "l6 BatchNorm"),
                                                 new ReLU(name: "l6 ReLU"),
                                                 new Linear(true, N, N, name: "l7 Linear"), // L7
                                                 new BatchNormalization(true, N, name: "l7 BatchNorm"),
                                                 new ReLU(name: "l7 ReLU"),
                                                 new Linear(true, N, N, name: "l8 Linear"), // L8
                                                 new BatchNormalization(true, N, name: "l8 BatchNorm"),
                                                 new ReLU(name: "l8 ReLU"),
                                                 new Linear(true, N, N, name: "l9 Linear"), // L9
                                                 new BatchNormalization(true, N, name: "l9 BatchNorm"),
                                                 new ReLU(name: "l9 ReLU"),
                                                 new Linear(true, N, N, name: "l10 Linear"), // L10
                                                 new BatchNormalization(true, N, name: "l10 BatchNorm"),
                                                 new ReLU(name: "l10 ReLU"),
                                                 new Linear(true, N, N, name: "l11 Linear"), // L11
                                                 new BatchNormalization(true, N, name: "l11 BatchNorm"),
                                                 new ReLU(name: "l11 ReLU"),
                                                 new Linear(true, N, N, name: "l12 Linear"), // L12
                                                 new BatchNormalization(true, N, name: "l12 BatchNorm"),
                                                 new ReLU(name: "l12 ReLU"),
                                                 new Linear(true, N, N, name: "l13 Linear"), // L13
                                                 new BatchNormalization(true, N, name: "l13 BatchNorm"),
                                                 new ReLU(name: "l13 ReLU"),
                                                 new Linear(true, N, N, name: "l14 Linear"), // L14
                                                 new BatchNormalization(true, N, name: "l14 BatchNorm"),
                                                 new ReLU(name: "l14 ReLU"),
                                                 new Linear(true, N, 10, name: "l15 Linear") // L15
                                                 );

            nn.SetOptimizer(new AdaGrad());


            for (int epoch = 0; epoch < 3; epoch++)
            {
                Real totalLoss        = 0;
                long totalLossCounter = 0;

                //Run the batch
                for (int i = 1; i < TRAIN_DATA_COUNT + 1; i++)
                {
                    RILogManager.Default?.SendDebug("epoch " + (epoch + 1) + " of 3, Batch " + i + " of " + TRAIN_DATA_COUNT);

                    //Get data randomly from training data
                    TestDataSet datasetX = mnistData.GetRandomXSet(BATCH_DATA_COUNT, 28, 28);

                    //Learn
                    Real sumLoss = Trainer.Train(nn, datasetX.Data, datasetX.Label, new SoftmaxCrossEntropy());
                    totalLoss += sumLoss;
                    totalLossCounter++;

                    if (i % 20 == 0)
                    {
                        RILogManager.Default?.SendDebug("batch count " + i + "/" + TRAIN_DATA_COUNT);
                        RILogManager.Default?.SendDebug("total loss " + totalLoss / totalLossCounter);
                        RILogManager.Default?.SendDebug("local loss " + sumLoss);

                        RILogManager.Default?.SendDebug("Testing random data...");

                        //Get data randomly from test data
                        TestDataSet datasetY = mnistData.GetRandomYSet(TEST_DATA_COUNT, 28);

                        //Run the test
                        Real accuracy = Trainer.Accuracy(nn, datasetY.Data, datasetY.Label);
                        RILogManager.Default?.SendDebug("Test Accuracy: " + accuracy);
                    }
                }
            }

            ModelIO.Save(nn, "Test7.nn");
            RILogManager.Default?.SendDebug(nn.Describe());
        }
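
To confirm that the saved file round-trips, the network can be loaded back and re-evaluated, mirroring the save/load pattern of Examples #4 and #6. A minimal sketch, reusing `mnistData`, `TEST_DATA_COUNT`, and the `Trainer.Accuracy` call from the example above:

            // Load the saved network and re-run the random-test-set accuracy check.
            FunctionStack loaded = ModelIO.Load("Test7.nn");
            TestDataSet verifySet = mnistData.GetRandomYSet(TEST_DATA_COUNT, 28);
            RILogManager.Default?.SendDebug("Reloaded Accuracy: " + Trainer.Accuracy(loaded, verifySet.Data, verifySet.Label));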
Example #8
        public static void Run()
        {
            int neuronCount = 28;

            RILogManager.Default?.SendDebug("MNIST Data Loading...");
            MnistData mnistData = new MnistData(neuronCount);

            RILogManager.Default.SendInformation("Training Start, creating function stack.");

            SortedFunctionStack nn = new SortedFunctionStack();

            for (int x = 0; x < numLayers; x++)
            {
                Application.DoEvents();

                // The first Linear maps the 28x28 input to N; later layers are N -> N.
                int inputSize = (x == 0) ? neuronCount * neuronCount : N;
                nn.Add(new Linear(true, inputSize, N, name: $"l{x} Linear"));
                nn.Add(new BatchNormalization(true, N, name: $"l{x} BatchNorm"));
                nn.Add(new ReLU(name: $"l{x} ReLU"));
                RILogManager.Default.ViewerSendWatch("Total Layers", (x + 1));
            }

            RILogManager.Default.SendInformation("Adding Output Layer");
            Application.DoEvents();
            nn.Add(new Linear(true, N, 10, noBias: false, name: $"l{numLayers + 1} Linear"));
            RILogManager.Default.ViewerSendWatch("Total Layers", numLayers);


            RILogManager.Default.SendInformation("Setting Optimizer to AdaGrad");
            nn.SetOptimizer(new AdaGrad());
            Application.DoEvents();

            RunningStatistics stats             = new RunningStatistics();
            Histogram         lossHistogram     = new Histogram();
            Histogram         accuracyHistogram = new Histogram();
            Real totalLoss        = 0;
            long totalLossCounter = 0;
            Real highestAccuracy  = 0;
            Real bestLocalLoss    = double.MaxValue; // start high so the first finite loss becomes the best
            Real bestTotalLoss    = double.MaxValue;

            for (int epoch = 0; epoch < 3; epoch++)
            {
                RILogManager.Default?.SendDebug("epoch " + (epoch + 1));
                RILogManager.Default.SendInformation("epoch " + (epoch + 1));
                RILogManager.Default.ViewerSendWatch("epoch", (epoch + 1));
                Application.DoEvents();

                for (int i = 1; i < TRAIN_DATA_COUNT + 1; i++)
                {
                    Application.DoEvents();

                    TestDataSet datasetX = mnistData.GetRandomXSet(BATCH_DATA_COUNT, neuronCount, neuronCount);

                    Real sumLoss = Trainer.Train(nn, datasetX.Data, datasetX.Label, new SoftmaxCrossEntropy());
                    totalLoss += sumLoss;
                    totalLossCounter++;

                    stats.Push(sumLoss);
                    lossHistogram.AddBucket(new Bucket(-10, 10));
                    accuracyHistogram.AddBucket(new Bucket(-10.0, 10));

                    if (sumLoss < bestLocalLoss && !double.IsNaN(sumLoss))
                    {
                        bestLocalLoss = sumLoss;
                    }
                    if (stats.Mean < bestTotalLoss && !double.IsNaN(sumLoss))
                    {
                        bestTotalLoss = stats.Mean;
                    }

                    try
                    {
                        lossHistogram.AddData(sumLoss);
                    }
                    catch (Exception)
                    {
                    }

                    if (i % 20 == 0)
                    {
                        RILogManager.Default.ViewerSendWatch("Batch Count ", i);
                        RILogManager.Default.ViewerSendWatch("Total/Mean loss", stats.Mean);
                        RILogManager.Default.ViewerSendWatch("Local loss", sumLoss);
                        RILogManager.Default.SendInformation("Batch Count " + i + "/" + TRAIN_DATA_COUNT + ", epoch " + epoch + 1);
                        RILogManager.Default.SendInformation("Total/Mean loss " + stats.Mean);
                        RILogManager.Default.SendInformation("Local loss " + sumLoss);
                        Application.DoEvents();


                        RILogManager.Default?.SendDebug("Testing...");

                        TestDataSet datasetY = mnistData.GetRandomYSet(TEST_DATA_COUNT, 28);
                        Real        accuracy = Trainer.Accuracy(nn, datasetY.Data, datasetY.Label);
                        if (accuracy > highestAccuracy)
                        {
                            highestAccuracy = accuracy;
                        }

                        RILogManager.Default?.SendDebug("Accuracy: " + accuracy);

                        RILogManager.Default.ViewerSendWatch("Best Accuracy: ", highestAccuracy);
                        RILogManager.Default.ViewerSendWatch("Best Total Loss ", bestTotalLoss);
                        RILogManager.Default.ViewerSendWatch("Best Local Loss ", bestLocalLoss);
                        Application.DoEvents();

                        try
                        {
                            accuracyHistogram.AddData(accuracy);
                        }
                        catch (Exception)
                        {
                        }
                    }
                }
            }

            ModelIO.Save(nn, Application.StartupPath + "\\test20.nn");
            RILogManager.Default?.SendDebug("Best Accuracy: " + highestAccuracy);
            RILogManager.Default?.SendDebug("Best Total Loss " + bestTotalLoss);
            RILogManager.Default?.SendDebug("Best Local Loss " + bestLocalLoss);
            RILogManager.Default.ViewerSendWatch("Best Accuracy: ", highestAccuracy);
            RILogManager.Default.ViewerSendWatch("Best Total Loss ", bestTotalLoss);
            RILogManager.Default.ViewerSendWatch("Best Local Loss ", bestLocalLoss);
        }
Example #9
        public static void Run()
        {
            const int learningCount = 10000;

            Real[][] trainData =
            {
                new Real[] { 0, 0 },
                new Real[] { 1, 0 },
                new Real[] { 0, 1 },
                new Real[] { 1, 1 }
            };

            Real[][] trainLabel =
            {
                new Real[] { 0 },
                new Real[] { 1 },
                new Real[] { 1 },
                new Real[] { 0 }
            };

            bool verbose = true;

            FunctionStack nn = new FunctionStack("Test1",
                                                 new Linear(verbose, 2, 2, name: "l1 Linear"),
                                                 new Sigmoid(name: "l1 Sigmoid"),
                                                 new Linear(verbose, 2, 2, name: "l2 Linear"));

            nn.SetOptimizer(new MomentumSGD());

            Info("Training...");
            for (int i = 0; i < learningCount; i++)
            {
                for (int j = 0; j < trainData.Length; j++)
                {
                    Trainer.Train(nn, trainData[j], trainLabel[j], new SoftmaxCrossEntropy());
                }
            }

            Info("Test Start...");

            foreach (Real[] input in trainData)
            {
                NdArray result      = nn.Predict(true, input)[0];
                int     resultIndex = Array.IndexOf(result.Data, result.Data.Max());
                Info($"{input[0]} xor {input[1]} = {resultIndex} {result}");
            }

            Info("Saving Model...");
            ModelIO.Save(nn, "test.nn");

            Info("Loading Model...");
            FunctionStack testnn = ModelIO.Load("test.nn");

            Info(testnn.Describe());

            Info("Test Start...");
            foreach (Real[] input in trainData)
            {
                NdArray result      = testnn.Predict(true, input)[0];
                int     resultIndex = Array.IndexOf(result.Data, result.Data.Max());
                Info($"{input[0]} xor {input[1]} = {resultIndex} {result}");
            }
        }