public static void Run()
{
    // Build the synthetic periodic training series.
    DataMaker maker = new DataMaker(STEPS_PER_CYCLE, NUMBER_OF_CYCLES);
    NdArray series = maker.Make();

    // 1 -> 5 -> LSTM(5) -> 1 regression stack.
    FunctionStack model = new FunctionStack("Test8",
        new Linear(true, 1, 5, name: "Linear l1"),
        new LSTM(true, 5, 5, name: "LSTM l2"),
        new Linear(true, 5, 1, name: "Linear l3")
    );

    model.SetOptimizer(new Adam());

    RILogManager.Default?.SendDebug("Training...");

    for (int epoch = 0; epoch < TRAINING_EPOCHS; epoch++)
    {
        NdArray[] batch = maker.MakeMiniBatch(series, MINI_BATCH_SIZE, LENGTH_OF_SEQUENCE);
        Real loss = ComputeLoss(model, batch);

        model.Update();
        model.ResetState(); // presumably clears recurrent (LSTM) state between sequences

        // Periodic progress report; epoch 0 is intentionally skipped.
        if (epoch != 0 && epoch % DISPLAY_EPOCH == 0)
        {
            RILogManager.Default?.SendDebug("[{0}]training loss:\t{1}", epoch, loss);
        }
    }

    RILogManager.Default?.SendDebug("Testing...");

    // Draw a fresh mini-batch and extrapolate from one fixed sample.
    NdArray[] testBatch = maker.MakeMiniBatch(series, MINI_BATCH_SIZE, LENGTH_OF_SEQUENCE);
    const int sampleIndex = 45;
    predict(testBatch[sampleIndex], model, PREDICTION_LENGTH);
}
public static void Run()
{
    // Build the synthetic periodic training series.
    DataMaker maker = new DataMaker(STEPS_PER_CYCLE, NUMBER_OF_CYCLES);
    NdArray<Real> series = maker.Make();

    // The network layout is listed in order inside the FunctionStack:
    // 1 -> 5 -> LSTM(5) -> 1 regression stack.
    FunctionStack<Real> model = new FunctionStack<Real>(
        new Linear<Real>(1, 5, name: "Linear l1"),
        new LSTM<Real>(5, 5, name: "LSTM l2"),
        new Linear<Real>(5, 1, name: "Linear l3")
    );

    // Declare the optimizer and attach it to the model.
    Adam<Real> adam = new Adam<Real>();
    adam.SetUp(model);

    // Training loop.
    Console.WriteLine("Training...");

    for (int epoch = 0; epoch < TRAINING_EPOCHS; epoch++)
    {
        NdArray<Real>[] batch = maker.MakeMiniBatch(series, MINI_BATCH_SIZE, LENGTH_OF_SEQUENCE);
        Real loss = ComputeLoss(model, batch);

        adam.Update();
        model.ResetState(); // presumably clears recurrent (LSTM) state between sequences

        // Periodic progress report; epoch 0 is intentionally skipped.
        if (epoch != 0 && epoch % DISPLAY_EPOCH == 0)
        {
            Console.WriteLine("[{0}]training loss:\t{1}", epoch, loss);
        }
    }

    Console.WriteLine("Testing...");

    // Draw a fresh mini-batch and extrapolate from one fixed sample.
    NdArray<Real>[] testBatch = maker.MakeMiniBatch(series, MINI_BATCH_SIZE, LENGTH_OF_SEQUENCE);
    const int sampleIndex = 45;
    predict(testBatch[sampleIndex], model, PREDICTION_LENGTH);
}
public static void Run()
{
    // Build the synthetic periodic training series.
    DataMaker maker = new DataMaker(STEPS_PER_CYCLE, NUMBER_OF_CYCLES);
    NdArray series = maker.Make();

    // Network configuration is listed in order inside the FunctionStack:
    // 1 -> 5 -> LSTM(5) -> 1 regression stack.
    FunctionStack model = new FunctionStack(
        new Linear(1, 5, name: "Linear l1"),
        new LSTM(5, 5, name: "LSTM l2"),
        new Linear(5, 1, name: "Linear l3")
    );

    // Declare the optimizer.
    model.SetOptimizer(new Adam());

    // Training loop.
    Console.WriteLine("Training...");

    for (int epoch = 0; epoch < TRAINING_EPOCHS; epoch++)
    {
        NdArray[] batch = maker.MakeMiniBatch(series, MINI_BATCH_SIZE, LENGTH_OF_SEQUENCE);
        Real loss = ComputeLoss(model, batch);

        model.Update();
        model.ResetState(); // presumably clears recurrent (LSTM) state between sequences

        // Periodic progress report; epoch 0 is intentionally skipped.
        if (epoch != 0 && epoch % DISPLAY_EPOCH == 0)
        {
            Console.WriteLine("[{0}]training loss:\t{1}", epoch, loss);
        }
    }

    Console.WriteLine("Testing...");

    // Draw a fresh mini-batch and extrapolate from one fixed sample.
    NdArray[] testBatch = maker.MakeMiniBatch(series, MINI_BATCH_SIZE, LENGTH_OF_SEQUENCE);
    const int sampleIndex = 45;
    predict(testBatch[sampleIndex], model, PREDICTION_LENGTH);
}