Example #1
        public override void DoWork(string[] args)
        {
            // The folder containing the model is taken from the command-line arguments
            var folder = args[1];

            var model = ModelIO.Load(folder);

            DoWork(model);
            ModelIO.Save(model);
        }
Example #2
        public override void DoWork(string[] args)
        {
            if (args.Length < 3)
            {
                throw (new ArgumentException(String.Format("Insufficient args")));
            }
            var           folder = args[1];
            AssemblerMode mode;

            if (!Enum.TryParse(args[2], true, out mode))
            {
                throw (new ArgumentException(String.Format("Unknown mode: {0}", args[2])));
            }
            var print = mode == AssemblerMode.Print;

            var model = ModelIO.Load(folder);

            DoWork(model, print);
            // ModelIO.Save(model);
        }
Example #3
        static void Main(string[] args)
        {
            var path = ModelIO.DebugSubdir("debug\\20");

            // Start from a clean folder: remove any previous montage file before loading
            Directory.CreateDirectory(path);
            File.Delete(path + "\\montage.v3");
            var model = ModelIO.Load(path);

            // Alternate each chunk between Face and Screen mode
            for (int i = 0; i < 1000; i++)
            {
                model.SetChunkMode(i * 3000, (i % 2 == 0) ? Mode.Face : Mode.Screen, false);
            }
            // For each chunk, add a short interval without voice followed by one with voice
            for (int i = 0; i < 1000; i++)
            {
                model.Montage.Intervals.Add(new Interval {
                    StartTime = i * 3000, EndTime = i * 3000 + 500, HasVoice = false
                });
                model.Montage.Intervals.Add(new Interval {
                    StartTime = i * 3000 + 500, EndTime = i * 3000 + 3000, HasVoice = true
                });
            }
            ModelIO.Save(model);
        }
Example #4
        public static void Run()
        {
            //Number of training iterations
            const int learningCount = 10000;

            //Training data
            Real[][] trainData =
            {
                new Real[] { 0, 0 },
                new Real[] { 1, 0 },
                new Real[] { 0, 1 },
                new Real[] { 1, 1 }
            };

            //Training data labels
            Real[][] trainLabel =
            {
                new Real[] { 0 },
                new Real[] { 1 },
                new Real[] { 1 },
                new Real[] { 0 }
            };

            //The network configuration is written in FunctionStack
            FunctionStack nn = new FunctionStack(
                new Linear(2, 2, name: "l1 Linear"),
                new Sigmoid(name: "l1 Sigmoid"),
                new Linear(2, 2, name: "l2 Linear")
                );

            //Declare the optimizer
            nn.SetOptimizer(new MomentumSGD());

            //Training loop
            Console.WriteLine("Training...");
            for (int i = 0; i < learningCount; i++)
            {
                for (int j = 0; j < trainData.Length; j++)
                {
                    //Specify the loss function at training time
                    Trainer.Train(nn, trainData[j], trainLabel[j], new SoftmaxCrossEntropy());
                }
            }

            //Show the training results
            Console.WriteLine("Test Start...");
            foreach (Real[] input in trainData)
            {
                NdArray result      = nn.Predict(input)[0];
                int     resultIndex = Array.IndexOf(result.Data, result.Data.Max());
                Console.WriteLine(input[0] + " xor " + input[1] + " = " + resultIndex + " " + result);
            }

            //Save the trained network
            ModelIO.Save(nn, "test.nn");

            //Load the trained network
            Function testnn = ModelIO.Load("test.nn");

            Console.WriteLine("Test Start...");
            foreach (Real[] input in trainData)
            {
                NdArray result      = testnn.Predict(input)[0];
                int     resultIndex = Array.IndexOf(result.Data, result.Data.Max());
                Console.WriteLine(input[0] + " xor " + input[1] + " = " + resultIndex + " " + result);
            }
        }
Example #5
        public static void Run()
        {
            const int learningCount = 10000;

            //Training data
            Real[][] trainData =
            {
                new Real[] { 0, 0 },
                new Real[] { 1, 0 },
                new Real[] { 0, 1 },
                new Real[] { 1, 1 }
            };

            //Training data labels
            Real[][] trainLabel =
            {
                new Real[] { 0 },
                new Real[] { 1 },
                new Real[] { 1 },
                new Real[] { 0 }
            };

            //Network configuration is written in FunctionStack
            FunctionStack nn = new FunctionStack(
                new Linear(2, 2, name: "l1 Linear"),
                new Sigmoid(name: "l1 Sigmoid"),
                new Linear(2, 2, name: "l2 Linear")
                );

            //Declare the optimizer
            nn.SetOptimizer(new MomentumSGD());

            //Training loop
            Console.WriteLine("Training...");
            for (int i = 0; i < learningCount; i++)
            {
                for (int j = 0; j < trainData.Length; j++)
                {
                    //Specify the loss function at training time
                    Trainer.Train(nn, trainData[j], trainLabel[j], new SoftmaxCrossEntropy());
                }
            }

            //Show training results
            Console.WriteLine("Test Start...");
            foreach (Real[] input in trainData)
            {
                NdArray result      = nn.Predict(input)[0];
                int     resultIndex = Array.IndexOf(result.Data, result.Data.Max());
                Console.WriteLine(input[0] + " xor " + input[1] + " = " + resultIndex + " " + result);
            }

            //Save network after learning
            ModelIO.Save(nn, "test.nn");

            //Load the network after learning
            FunctionStack testnn = ModelIO.Load("test.nn");

            Console.WriteLine("Test Start...");
            foreach (Real[] input in trainData)
            {
                NdArray result      = testnn.Predict(input)[0];
                int     resultIndex = Array.IndexOf(result.Data, result.Data.Max());
                Console.WriteLine(input[0] + " xor " + input[1] + " = " + resultIndex + " " + result);
            }
        }
Example #6
        public static void Run()
        {
            const int learningCount = 10000;

            Real[][] trainData =
            {
                new Real[] { 0, 0 },
                new Real[] { 1, 0 },
                new Real[] { 0, 1 },
                new Real[] { 1, 1 }
            };

            Real[][] trainLabel =
            {
                new Real[] { 0 },
                new Real[] { 1 },
                new Real[] { 1 },
                new Real[] { 0 }
            };

            bool verbose = true;

            FunctionStack nn = new FunctionStack("Test1",
                                                 new Linear(verbose, 2, 2, name: "l1 Linear"),
                                                 new Sigmoid(name: "l1 Sigmoid"),
                                                 new Linear(verbose, 2, 2, name: "l2 Linear"));

            nn.SetOptimizer(new MomentumSGD());

            Info("Training...");
            for (int i = 0; i < learningCount; i++)
            {
                for (int j = 0; j < trainData.Length; j++)
                {
                    Trainer.Train(nn, trainData[j], trainLabel[j], new SoftmaxCrossEntropy());
                }
            }

            Info("Test Start...");

            foreach (Real[] input in trainData)
            {
                NdArray result      = nn.Predict(true, input)?[0];
                int     resultIndex = Array.IndexOf(result?.Data, result.Data.Max());
                Info($"{input[0]} xor {input[1]} = {resultIndex} {result}");
            }

            Info("Saving Model...");
            ModelIO.Save(nn, "test.nn");

            Info("Loading Model...");
            FunctionStack testnn = ModelIO.Load("test.nn");

            Info(testnn.Describe());

            Info("Test Start...");
            foreach (Real[] input in trainData)
            {
                NdArray result      = testnn?.Predict(true, input)?[0];
                int     resultIndex = Array.IndexOf(result?.Data, result?.Data.Max());
                Info($"{input[0]} xor {input[1]} = {resultIndex} {result}");
            }
        }