/// <summary>
/// Runs the full adversarial training loop: each batch, the discriminator is updated
/// on a half batch of real samples and a half batch of generated samples, then the
/// generator is updated through the combined GAN model.
/// </summary>
/// <param name="generator">Generator model used to produce fake samples.</param>
/// <param name="discriminator">Discriminator model trained directly on real/fake batches.</param>
/// <param name="gan">Combined model (generator feeding the discriminator) used to train the generator.</param>
/// <param name="dataset">Real training samples; the first dimension is the sample count.</param>
/// <param name="latentDim">Size of the generator's latent input vector.</param>
/// <param name="epochs">Number of passes over the dataset.</param>
/// <param name="batchSize">Full batch size; the discriminator sees half real, half fake.</param>
static void Train(Sequential generator, Sequential discriminator, Sequential gan, NDarray dataset, int latentDim, int epochs = 200, int batchSize = 128)
{
    // Integer division: any trailing partial batch is dropped each epoch.
    var batchPerEpoch = dataset.shape[0] / batchSize;
    var halfBatch = batchSize / 2;

    for (var i = 0; i < epochs; i++)
    {
        for (var j = 0; j < batchPerEpoch; j++)
        {
            // Discriminator update: half batch of real samples, then half batch of fakes.
            var real = GenerateRealSamples(dataset, halfBatch);
            var dLoss1 = discriminator.TrainOnBatch(real.Item1, real.Item2);

            var fake = GenerateFakeGeneratorSamples(generator, latentDim, halfBatch);
            var dLoss2 = discriminator.TrainOnBatch(fake.Item1, fake.Item2);

            // Generator update via the combined model: fakes are labelled as real (1)
            // so the gradient pushes the generator toward fooling the discriminator.
            var xGan = GenerateLatentPoints(latentDim, batchSize);
            var yGan = np.ones(new int[] { batchSize, 1 });
            var gLoss = gan.TrainOnBatch(xGan, yGan);

            Console.WriteLine($"> EPOCH: {i} \t{j}/{batchPerEpoch} \td1={dLoss1.First()} \td2={dLoss2.First()} \tg={gLoss.Last()}");
        }

        // Summarize every epoch for the first ten, then every tenth epoch after that.
        if (i % 10 == 0 || i < 10)
        {
            SummarizePerformance(i, generator, discriminator, dataset, latentDim);
        }
    }
}
/// <summary>
/// Pre-trains the discriminator alone, alternating half batches of real samples
/// and statically generated fake samples, logging accuracy for each.
/// </summary>
/// <param name="model">Discriminator model to train.</param>
/// <param name="dataset">Real training samples; the first dimension is the sample count.</param>
/// <param name="iterations">Number of real/fake update pairs to run.</param>
/// <param name="batchSize">Full batch size; each update uses half real, half fake.</param>
static void TrainDiscriminator(Sequential model, NDarray dataset, int iterations = 20, int batchSize = 128)
{
    var halfBatch = batchSize / 2;

    for (int i = 0; i < iterations; i++)
    {
        // One update on real samples, one on fakes, so the model sees a balanced mix.
        var real = GenerateRealSamples(dataset, halfBatch);
        var realAcc = model.TrainOnBatch(real.Item1, real.Item2);

        var fake = GenerateFakeSamples(halfBatch);
        var fakeAcc = model.TrainOnBatch(fake.Item1, fake.Item2);

        // Last() is presumably the accuracy metric of the batch result — TODO confirm
        // against the model's compiled metrics.
        Console.WriteLine($">{i}\t real={realAcc.Last() * 100}%\t fake={fakeAcc.Last() * 100}%");
    }
}
/// <summary>
/// Trains the generator through the combined GAN model: each iteration draws a
/// fresh batch of latent points and updates on them with all-ones labels.
/// </summary>
/// <param name="ganModel">Combined model (generator feeding the discriminator).</param>
/// <param name="latentDim">Size of the generator's latent input vector.</param>
/// <param name="epochs">Number of update iterations to run.</param>
/// <param name="batchSize">Number of latent points sampled per update.</param>
static void TrainGan(Sequential ganModel, int latentDim, int epochs, int batchSize)
{
    for (var epoch = 0; epoch < epochs; epoch++)
    {
        // Label every generated sample as "real" (1) so the update pushes the
        // generator toward fooling the discriminator.
        var latentInput = GenerateLatentPoints(latentDim, batchSize);
        var targets = np.ones(new int[] { batchSize, 1 });
        ganModel.TrainOnBatch(latentInput, targets);
    }
}