        public void NextRound()
        {
            info.NextRound();
            if (cost <= 0)
            {
                SetLastInfo("No research at the moment");
                player.AddResTotal("research", -player.GetResTotal("research"), ResType.Produce);
                return;
            }

            // Spend this round's research output on the current project
            cost -= player.GetResTotal("research");
            player.AddResTotal("research", -player.GetResTotal("research"), ResType.Produce);

            // Research not finished yet?
            if (cost > 0)
            {
                return;
            }

            // Found something?
            List<Research> av = AvailableResearch(actual);

            if (av.Count == 0)
            {
                SetLastInfo("Found nothing in your areas.");
                return;
            }

            // Finish it
            Research r = NRandom<Research>.Rand(av);

            finish.Add(r.id, true);
            SetLastInfo($"Eureka! Finished the research {r.Name()}");

            // Research again
            BeginNewResearch(actual);
        }
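For context, `NRandom<Research>.Rand(av)` appears to pick one element of the list at random. The library's implementation is not shown on this page; a minimal sketch of such a helper, using System.Random as a hypothetical stand-in:

using System;
using System.Collections.Generic;

// Hypothetical stand-in for NRandom<T>.Rand: a uniform pick from a non-empty list.
public static class RandomPick
{
    private static readonly Random rng = new Random();

    public static T Rand<T>(IReadOnlyList<T> items)
    {
        if (items == null || items.Count == 0)
            throw new ArgumentException("items must be non-empty", nameof(items));

        return items[rng.Next(items.Count)]; // uniform over [0, Count)
    }
}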
Example #2
        public void Train()
        {
            string dir = "ND_OPT_ConvAutoencoder_Data";

            Directory.CreateDirectory($@"{dir}");
            Directory.CreateDirectory($@"{dir}\Results");
            Directory.CreateDirectory($@"{dir}\Sources");

            // Input-side dataset at StartSide resolution
            AnimeDatasets a_dataset = new AnimeDatasets(StartSide, @"I:\Datasets\VAE_Dataset\White", @"I:\Datasets\VAE_Dataset\White\conv"); // alternative: @"I:\Datasets\anime-faces\combined", @"I:\Datasets\anime-faces\combined_small"

            a_dataset.InitializeDataset();

            // Target-side dataset at EndSide resolution
            AnimeDatasets b_dataset = new AnimeDatasets(EndSide, @"I:\Datasets\VAE_Dataset\White", @"I:\Datasets\VAE_Dataset\White\conv"); // alternative: @"I:\Datasets\anime-faces\combined", @"I:\Datasets\anime-faces\combined_small"

            b_dataset.InitializeDataset();

            Adam      sgd       = new Adam(0.001f);
            Quadratic quadratic = new Quadratic();

            NRandom r  = new NRandom(0);
            NRandom r2 = new NRandom(0);

            Matrix loss_deriv = new Matrix(OutputSize, 1, MemoryFlags.ReadWrite, true);

            #region Setup Database
            Matrix data_vec = new Matrix(LatentSize, 1, MemoryFlags.ReadOnly, false);

            Matrix[]  a_dataset_vec = new Matrix[a_dataset.TrainingFiles.Count];
            float[][] a_dataset_f   = new float[a_dataset.TrainingFiles.Count][];

            // The two datasets are paired image-for-image, hence the shared count
            Matrix[]  b_dataset_vec = new Matrix[a_dataset.TrainingFiles.Count];
            float[][] b_dataset_f   = new float[a_dataset.TrainingFiles.Count][];

            for (int i = 0; i < a_dataset.TrainingFiles.Count; i++)
            {
                a_dataset_f[i]   = new float[InputSize];
                a_dataset_vec[i] = new Matrix(InputSize, 1, MemoryFlags.ReadOnly, false);
                a_dataset.LoadImage(a_dataset.TrainingFiles[i], a_dataset_f[i]);
                a_dataset_vec[i].Write(a_dataset_f[i]);

                b_dataset_f[i]   = new float[OutputSize];
                b_dataset_vec[i] = new Matrix(OutputSize, 1, MemoryFlags.ReadOnly, false);
                b_dataset.LoadImage(b_dataset.TrainingFiles[i], b_dataset_f[i]);
                b_dataset_vec[i].Write(b_dataset_f[i]);
            }
            #endregion

            for (int i0 = 0; i0 < 20000 * BatchSize; i0++)
            {
                // Sample from the first half of the dataset only.
                int idx = r.Next() % (a_dataset.TrainingFiles.Count / 2);

                var out_img = superres_enc_front.ForwardPropagate(a_dataset_vec[idx]);
                quadratic.LossDeriv(out_img[0], b_dataset_vec[idx], loss_deriv, 0);

                superres_dec_back.ResetLayerErrors();
                superres_dec_back.ComputeGradients(loss_deriv);
                superres_dec_back.ComputeLayerErrors(loss_deriv);
                superres_dec_back.UpdateLayers(sgd);

                loss_deriv.Clear();

                if (i0 % BatchSize == 0)
                {
                    a_dataset.SaveImage($@"{dir}\Sources\{i0 / BatchSize}.png", a_dataset_f[idx]);
                    b_dataset.SaveImage($@"{dir}\Results\{i0 / BatchSize}.png", out_img[0].Read());
                }

                Console.Clear();
                Console.Write($"Iteration: {i0 / BatchSize}, Sub-Batch: {i0 % BatchSize}");
            }

            superres_enc_front.Save($@"{dir}\network_final.bin");
            Console.WriteLine("DONE.");
        }
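For reference, `Quadratic.LossDeriv` presumably computes the elementwise derivative of a mean-squared-error style loss; the actual class operates on GPU-backed `Matrix` objects and takes a regularization coefficient (passed as 0 above). A minimal sketch on plain float arrays:

// Hypothetical sketch of a quadratic (MSE) loss, L = 0.5 * sum((y - t)^2),
// and its derivative dL/dy_i = y_i - t_i.
public static class QuadraticSketch
{
    public static float Loss(float[] y, float[] t)
    {
        float sum = 0f;
        for (int i = 0; i < y.Length; i++)
        {
            float d = y[i] - t[i];
            sum += 0.5f * d * d;
        }
        return sum;
    }

    public static void LossDeriv(float[] y, float[] t, float[] deriv)
    {
        for (int i = 0; i < y.Length; i++)
            deriv[i] += y[i] - t[i]; // accumulated, which is why the caller clears deriv each step
    }
}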
Example #3
        public void Execute()
        {
            #region QueryHandle

            QueryHandler<T_SLOT, T_SLOT_INFOS, T_BRAIN> Q =
                new QueryHandler<T_SLOT, T_SLOT_INFOS, T_BRAIN>()
            {
                m_brain                  = m_brain,
                m_inputSlotInfos         = m_inputSlotInfos,
                m_inputSlotCoordinateMap = m_inputSlotCoordinateMap,
                m_socketCount            = m_socketCount,
                m_socketOffsets          = m_socketOffsets,
                m_socketsMirrors         = m_socketsMirrors,
                m_socketsMirrorsIndices  = m_socketsMirrorsIndices,
                m_moduleCount            = m_moduleCount,
                m_modulesWeights         = m_modulesWeights,
                m_modulesHeaders         = m_modulesHeaders,
                m_modulesNeighbors       = m_modulesNeighbors,
                m_results                = m_results,
                m_nullPairLookup         = m_nullPairLookup
            };

            #endregion

            NativeList<Neighbor> contents =
                new NativeList<Neighbor>(m_socketCount, Allocator.Temp);

            NativeList<int>
                candidates  = new NativeList<int>(m_modulesNeighbors.Length, Allocator.Temp),
                unsolvables = new NativeList<int>(10, Allocator.Temp);

            NativeList<float> weights = new NativeList<float>(m_moduleCount, Allocator.Temp);

            int
                cCount,
                result;

            // First pass: assign a result to every slot that is still unsolved
            for (int slotIndex = 0, count = m_inputSlotInfos.Length; slotIndex < count; slotIndex++)
            {
                if (m_results[slotIndex] >= 0)
                {
                    continue;
                }

                if (Q.TryGetCandidates(
                        slotIndex,
                        ref contents,
                        ref candidates,
                        ref weights,
                        out cCount))
                {
                    result = candidates[NRandom.GetRandomWeightedIndex(ref weights, NextFloat())];
                }
                else
                {
                    result = SlotContent.UNSOLVABLE;
                    unsolvables.Add(slotIndex);
                }

                m_results[slotIndex] = result;
            }

            // Second pass, if necessary: retry the slots that came back unsolvable

            if (unsolvables.Length != 0)
            {
                int index = 0;

                for (int u = 0, uCount = unsolvables.Length; u < uCount; u++)
                {
                    index = unsolvables[u];

                    if (Q.TryGetCandidates(
                            index,
                            ref contents,
                            ref candidates,
                            ref weights,
                            out cCount))
                    {
                        result = candidates[NRandom.GetRandomWeightedIndex(ref weights, NextFloat())];
                    }
                    else
                    {
                        result = SlotContent.UNSOLVABLE;
                    }

                    m_results[index] = result;
                }
            }

            contents.Release();
            candidates.Release();
            unsolvables.Release();
            weights.Release();
        }
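`NRandom.GetRandomWeightedIndex(ref weights, NextFloat())` is the only stochastic step here: it turns the candidate weights into a categorical draw. A minimal sketch of weighted index selection under that assumed contract (non-negative weights, u uniform in [0, 1)), on plain arrays for self-containment:

// Hypothetical sketch of weighted index selection: walk the cumulative
// weight until it passes u * total, and return that index.
public static int GetRandomWeightedIndex(float[] weights, float u)
{
    float total = 0f;
    for (int i = 0; i < weights.Length; i++)
        total += weights[i];

    float threshold = u * total;
    float cumulative = 0f;
    for (int i = 0; i < weights.Length; i++)
    {
        cumulative += weights[i];
        if (threshold < cumulative)
            return i;
    }
    return weights.Length - 1; // guard against floating-point rounding at u near 1
}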
Example #4
        public void Train()
        {
            string dir = "GAN_Data";

            Directory.CreateDirectory($@"{dir}");
            Directory.CreateDirectory($@"{dir}\Results");
            Directory.CreateDirectory($@"{dir}\Sources");
            Directory.CreateDirectory($@"{dir}\ResultsPRE");
            Directory.CreateDirectory($@"{dir}\SourcesPRE");

            #region GAN Variables
            var sgd_disc = new Adam(0.0002f, 1e-6f);
            var sgd_gen  = new Adam(0.0002f, 1e-6f);
            var sgd_dec  = new Adam(0.0002f);

            var fake_loss = new NamedLossFunction(NamedLossFunction.GANDiscFake, NamedLossFunction.GANDiscFake);
            var real_loss = new NamedLossFunction(NamedLossFunction.GANDiscReal, NamedLossFunction.GANDiscReal);
            var gen_loss  = new NamedLossFunction(NamedLossFunction.GANGen, NamedLossFunction.GANGen);
            var enc_loss  = new Quadratic();

            NRandom r_dataset = new NRandom(0);
            NRandom r_latent  = new NRandom(0);

            Matrix data_vec    = new Matrix(LatentSize, 1, MemoryFlags.ReadOnly, false);
            Matrix d_real_loss = new Matrix(1, 1, MemoryFlags.ReadWrite, true);
            Matrix d_fake_loss = new Matrix(1, 1, MemoryFlags.ReadWrite, true);
            Matrix g_loss      = new Matrix(1, 1, MemoryFlags.ReadWrite, true);
            Matrix e_loss      = new Matrix(OutputSize, 1, MemoryFlags.ReadWrite, true);
            Matrix loss_reader = new Matrix(1, 1, MemoryFlags.ReadWrite, true);

            float d_real_loss_f = 0;
            float d_fake_loss_f = 0;
            float g_loss_f = 0;
            float d_real_class_f = 0, d_fake_class_f = 0, g_class_f = 0;

            Matrix zero = new Matrix(1, 1, MemoryFlags.ReadWrite, true);
            zero.Memory[0] = 0;

            Matrix one = new Matrix(1, 1, MemoryFlags.ReadWrite, true);
            one.Memory[0] = 1;
            #endregion

            #region Setup Database
            AnimeDatasets dataset = new AnimeDatasets(StartSide, /*@"I:\Datasets\anime-faces\combined", @"I:\Datasets\anime-faces\combined_small");*/ @"I:\Datasets\VAE_Dataset\White", @"I:\Datasets\VAE_Dataset\White\conv");
            dataset.InitializeDataset();
            Matrix[]  dataset_vec = new Matrix[dataset.TrainingFiles.Count];
            float[][] dataset_f   = new float[dataset.TrainingFiles.Count][];

            for (int i = 0; i < dataset.TrainingFiles.Count; i++)
            {
                dataset_f[i]   = new float[InputSize];
                dataset_vec[i] = new Matrix(InputSize, 1, MemoryFlags.ReadOnly, false);
                dataset.LoadImage(dataset.TrainingFiles[i], dataset_f[i]);
                dataset_vec[i].Write(dataset_f[i]);
            }
            #endregion

            //Pretrain generator
            for (int i0 = 0; i0 < 5000; i0++)
            {
                int idx = r_dataset.Next() % dataset.TrainingFiles.Count;

                var latent = encoder.ForwardPropagate(dataset_vec[idx]);
                var res    = generator.ForwardPropagate(latent);

                e_loss.Clear();
                // L2 coefficient is zeroed here, so no weight decay enters the pretraining loss derivative
                enc_loss.LossDeriv(res[0], dataset_vec[idx], e_loss, 0.0f * sgd_dec.L2Val / sgd_dec.Net);

                var enc_loss_v = generator_back.ComputeGradients(e_loss);
                generator_back.ComputeLayerErrors(e_loss);
                encoder_back.ComputeGradients(enc_loss_v);
                encoder_back.ComputeLayerErrors(enc_loss_v);

                sgd_dec.Update(0);
                generator_back.UpdateLayers(sgd_gen);
                encoder_back.UpdateLayers(sgd_dec);

                if (i0 % BatchSize == 0)
                {
                    dataset.SaveImage($@"{dir}\SourcesPRE\{i0 / BatchSize}.png", dataset_f[idx]);
                    dataset.SaveImage($@"{dir}\ResultsPRE\{i0 / BatchSize}.png", res[0].Read());
                    generator.Save($@"{dir}\pretrained_generator_fc.bin");
                    encoder.Save($@"{dir}\trained_encoder_fc.bin");
                }

                Console.Clear();
                Console.WriteLine($"Iteration: {i0 / BatchSize} Sub-batch: {i0 % BatchSize}");
            }

            for (int i0 = 0; i0 < 5000 * BatchSize; i0++)
            {
                int idx = r_dataset.Next() % dataset.TrainingFiles.Count;

                // Generate the fake data from a fresh N(0, 1) latent vector
                for (int i1 = 0; i1 < LatentSize; i1++)
                {
                    data_vec.Memory[i1] = (float)r_latent.NextGaussian(0, 1);
                }
                var fake_result = generator.ForwardPropagate(data_vec);

                if (i0 % BatchSize == 0)
                {
                    dataset.SaveImage($@"{dir}\Sources\{i0 / BatchSize}.png", dataset_f[idx]);
                    dataset.SaveImage($@"{dir}\Results\{i0 / BatchSize}.png", fake_result[0].Read());
                }

                Console.Clear();
                Console.WriteLine($"Iteration: {i0 / BatchSize} Sub-batch: {i0 % BatchSize}");
                Console.WriteLine($"Discriminator Real Loss: {d_real_loss_f}\nDiscriminator Fake Loss: {d_fake_loss_f}\nGenerator Loss: {g_loss_f}\n");
                Console.WriteLine($"Discriminator Real Prediction: {d_real_class_f}\nDiscriminator Fake Prediction: {d_fake_class_f}\nGenerator Prediction: {g_class_f}");

                d_fake_loss.Clear();
                d_real_loss.Clear();
                g_loss.Clear();

                // Noisy soft labels: "fake" targets jittered into [0, 0.1), "real" targets into (0.9, 1]
                zero.Memory[0] = (r_latent.Next() % 1000) / 10000f;
                one.Memory[0]  = 1 - (r_latent.Next() % 1000) / 10000f;

                //Discriminator feed forward for real data
                {
                    var d_real_class = discriminator.ForwardPropagate(dataset_vec[idx]);
                    real_loss.LossDeriv(d_real_class[0], one, d_real_loss, 0.01f * sgd_disc.L2Val / sgd_disc.Net);

                    var d_real_prop = discriminator_back.ComputeGradients(d_real_loss);
                    discriminator_back.ComputeLayerErrors(d_real_loss);

                    d_real_class_f = d_real_class[0].Memory[0];
                    real_loss.Loss(d_real_class[0], one, loss_reader, 0.01f * sgd_disc.L2Val / sgd_disc.Net);
                    d_real_loss_f         = loss_reader.Memory[0];
                    loss_reader.Memory[0] = 0;
                }

                //Discriminator feed forward for fake data
                {
                    var d_fake_class = discriminator.ForwardPropagate(fake_result);
                    fake_loss.LossDeriv(d_fake_class[0], zero, d_fake_loss, 0.01f * sgd_disc.L2Val / sgd_disc.Net);

                    var d_fake_prop = discriminator_back.ComputeGradients(d_fake_loss);
                    discriminator_back.ComputeLayerErrors(d_fake_loss);

                    d_fake_class_f = d_fake_class[0].Memory[0];
                    fake_loss.Loss(d_fake_class[0], zero, loss_reader, 0.01f * sgd_disc.L2Val / sgd_disc.Net);
                    d_fake_loss_f         = loss_reader.Memory[0];
                    loss_reader.Memory[0] = 0;
                }

                //Update and reset discriminator
                sgd_disc.Update(0);
                discriminator_back.UpdateLayers(sgd_disc);
                discriminator_back.ResetLayerErrors();

                //Generate the fake data again
                {
                    for (int i1 = 0; i1 < LatentSize; i1++)
                    {
                        data_vec.Memory[i1] = (float)r_latent.NextGaussian(0, 1);
                    }
                    fake_result = generator.ForwardPropagate(data_vec);
                    var d_gen_class = discriminator.ForwardPropagate(fake_result);

                    // Compute the discriminator's cross-entropy loss as if the fakes were real, and propagate it back to the generator
                    gen_loss.LossDeriv(d_gen_class[0], one, g_loss, 0.01f * sgd_gen.L2Val / sgd_gen.Net);
                    var d_err = discriminator_back.ComputeGradients(g_loss);
                    generator_back.ComputeGradients(d_err);
                    generator_back.ComputeLayerErrors(d_err);

                    g_class_f = d_gen_class[0].Memory[0];
                    gen_loss.Loss(d_gen_class[0], one, loss_reader, 0.01f * sgd_gen.L2Val / sgd_gen.Net);
                    g_loss_f = loss_reader.Memory[0];
                    loss_reader.Memory[0] = 0;

                    //Update generator
                    sgd_gen.Update(0);
                    generator_back.UpdateLayers(sgd_gen);
                    generator_back.ResetLayerErrors();
                    discriminator.ResetLayerErrors();
                }
            }

            discriminator.Save($@"{dir}\network_final.bin");
            Console.WriteLine("DONE.");
        }
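The three `NamedLossFunction` instances are only named, never defined, in this snippet. Assuming they follow the standard non-saturating GAN formulation, a sketch of what each presumably computes for a scalar discriminator output:

using System;

// Assumed standard non-saturating GAN losses for a scalar prediction d in (0, 1):
//   GANDiscReal: -log(d)      -- d = D(x),    push real predictions toward 1
//   GANDiscFake: -log(1 - d)  -- d = D(G(z)), push fake predictions toward 0
//   GANGen:      -log(d)      -- d = D(G(z)), push the generator to fool D
public static class GanLossSketch
{
    const float Eps = 1e-7f; // numerical guard against log(0)

    public static float DiscReal(float d) => -MathF.Log(d + Eps);
    public static float DiscFake(float d) => -MathF.Log(1f - d + Eps);
    public static float Gen(float d)      => -MathF.Log(d + Eps);
}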
Example #5
        public DropoutLayer(float p = 0.3f, int seed = 0)
        {
            P       = p;
            Enabled = true;
            rng     = new NRandom(seed);
        }
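The constructor above only stores the drop probability and RNG. At training time a dropout layer presumably applies an inverted-dropout mask; a generic sketch (not the library's actual forward pass):

// Inverted dropout sketch: zero each activation with probability p and scale
// survivors by 1/(1-p), so the expected activation is unchanged at test time.
public static void ApplyDropout(float[] activations, float p, System.Random rng, bool enabled)
{
    if (!enabled || p <= 0f)
        return; // dropout disabled, e.g. during inference

    float scale = 1f / (1f - p);
    for (int i = 0; i < activations.Length; i++)
        activations[i] = rng.NextDouble() < p ? 0f : activations[i] * scale;
}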
Example #6
        public UniformWeightInitializer(int seed, float bias)
        {
            rng = new NRandom(seed);
            b   = bias;
        }
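Correspondingly, a `UniformWeightInitializer` presumably fills each weight from a symmetric uniform range and sets a constant bias. A sketch, assuming a 1/sqrt(fanIn) limit (the library's actual range may differ):

using System;

// Uniform initialization sketch: weights ~ U(-limit, limit) with
// limit = 1/sqrt(fanIn), biases set to the constant b from the constructor.
public static void InitializeUniform(float[] weights, float[] biases,
                                     int fanIn, float b, Random rng)
{
    float limit = 1f / (float)Math.Sqrt(fanIn);
    for (int i = 0; i < weights.Length; i++)
        weights[i] = (float)(rng.NextDouble() * 2.0 - 1.0) * limit;
    for (int i = 0; i < biases.Length; i++)
        biases[i] = b;
}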
Example #7
        public void Train()
        {
            string dir = "GAN_Data";

            Directory.CreateDirectory($@"{dir}");
            Directory.CreateDirectory($@"{dir}\Results");
            Directory.CreateDirectory($@"{dir}\Sources");

            #region GAN Variables
            var sgd_disc = new Adam(0.0002f, 1e-6f);
            var sgd_gen  = new Adam(0.0002f, 1e-6f);

            var fake_loss = new NamedLossFunction(NamedLossFunction.GANDiscFake, NamedLossFunction.GANDiscFake);
            var real_loss = new NamedLossFunction(NamedLossFunction.GANDiscReal, NamedLossFunction.GANDiscReal);
            var gen_loss  = new NamedLossFunction(NamedLossFunction.GANGen, NamedLossFunction.GANGen);

            NRandom r_dataset = new NRandom(0);
            NRandom r_latent  = new NRandom(0);

            Matrix data_vec    = new Matrix(LatentSize, 1, MemoryFlags.ReadOnly, false);
            Matrix d_real_loss = new Matrix(1, 1, MemoryFlags.ReadWrite, true);
            Matrix d_fake_loss = new Matrix(1, 1, MemoryFlags.ReadWrite, true);
            Matrix g_loss      = new Matrix(1, 1, MemoryFlags.ReadWrite, true);
            Matrix loss_reader = new Matrix(1, 1, MemoryFlags.ReadWrite, true);

            float d_real_loss_f = 0;
            float d_fake_loss_f = 0;
            float g_loss_f = 0;
            float d_real_class_f = 0, d_fake_class_f = 0, g_class_f = 0;

            Matrix zero = new Matrix(1, 1, MemoryFlags.ReadWrite, true);
            zero.Memory[0] = 0;

            Matrix one = new Matrix(1, 1, MemoryFlags.ReadWrite, true);
            one.Memory[0] = 1;
            #endregion

            #region Setup Database
            Reader dataset = new Reader();
            dataset.InitializeTraining();
            Matrix[] imgs = dataset.TrainingImages;
            #endregion

            for (int i0 = 0; i0 < 1000 * BatchSize; i0++)
            {
                int idx = r_dataset.Next() % imgs.Length;

                // Generate the fake data from a fresh N(0, 1) latent vector
                for (int i1 = 0; i1 < LatentSize; i1++)
                {
                    data_vec.Memory[i1] = (float)r_latent.NextGaussian(0, 1);
                }
                var fake_result = generator.ForwardPropagate(data_vec);

                if (i0 % BatchSize == 0)
                {
                    SaveImage($@"{dir}\Sources\{i0 / BatchSize}.png", imgs[idx].Read(), 28);
                    SaveImage($@"{dir}\Results\{i0 / BatchSize}.png", fake_result[0].Read(), 28);
                }

                Console.Clear();
                Console.WriteLine($"Iteration: {i0 / BatchSize} Sub-batch: {i0 % BatchSize}");
                Console.WriteLine($"Discriminator Real Loss: {d_real_loss_f}\nDiscriminator Fake Loss: {d_fake_loss_f}\nGenerator Loss: {g_loss_f}\n");
                Console.WriteLine($"Discriminator Real Prediction: {d_real_class_f}\nDiscriminator Fake Prediction: {d_fake_class_f}\nGenerator Prediction: {g_class_f}");

                d_fake_loss.Clear();
                d_real_loss.Clear();
                g_loss.Clear();

                // Noisy soft labels: "fake" targets jittered into [0, 0.1), "real" targets into (0.9, 1]
                zero.Memory[0] = (r_latent.Next() % 100) / 1000f;
                one.Memory[0]  = 1 - (r_latent.Next() % 100) / 1000f;

                //Discriminator feed forward for real data
                {
                    var d_real_class = discriminator.ForwardPropagate(imgs[idx]);
                    real_loss.LossDeriv(d_real_class[0], one, d_real_loss, 0.01f * sgd_disc.L2Val / sgd_disc.Net);

                    var d_real_prop = discriminator_back.ComputeGradients(d_real_loss);
                    discriminator_back.ComputeLayerErrors(d_real_loss);

                    d_real_class_f = d_real_class[0].Memory[0];
                    real_loss.Loss(d_real_class[0], one, loss_reader, 0.01f * sgd_disc.L2Val / sgd_disc.Net);
                    d_real_loss_f         = loss_reader.Memory[0];
                    loss_reader.Memory[0] = 0;
                }

                //Discriminator feed forward for fake data
                {
                    var d_fake_class = discriminator.ForwardPropagate(fake_result);
                    fake_loss.LossDeriv(d_fake_class[0], zero, d_fake_loss, 0.01f * sgd_disc.L2Val / sgd_disc.Net);

                    var d_fake_prop = discriminator_back.ComputeGradients(d_fake_loss);
                    discriminator_back.ComputeLayerErrors(d_fake_loss);

                    d_fake_class_f = d_fake_class[0].Memory[0];
                    fake_loss.Loss(d_fake_class[0], zero, loss_reader, 0.01f * sgd_disc.L2Val / sgd_disc.Net);
                    d_fake_loss_f         = loss_reader.Memory[0];
                    loss_reader.Memory[0] = 0;
                }

                //Update and reset discriminator
                sgd_disc.Update(0);
                discriminator_back.UpdateLayers(sgd_disc);
                discriminator_back.ResetLayerErrors();

                //Generate the fake data again
                {
                    for (int i1 = 0; i1 < LatentSize; i1++)
                    {
                        data_vec.Memory[i1] = (float)r_latent.NextGaussian(0, 1);
                    }
                    fake_result = generator.ForwardPropagate(data_vec);
                    var d_gen_class = discriminator.ForwardPropagate(fake_result);

                    // Compute the discriminator's cross-entropy loss as if the fakes were real, and propagate it back to the generator
                    gen_loss.LossDeriv(d_gen_class[0], one, g_loss, 0.01f * sgd_gen.L2Val / sgd_gen.Net);
                    var d_err = discriminator_back.ComputeGradients(g_loss);
                    generator_back.ComputeGradients(d_err);
                    generator_back.ComputeLayerErrors(d_err);

                    g_class_f = d_gen_class[0].Memory[0];
                    gen_loss.Loss(d_gen_class[0], one, loss_reader, 0.01f * sgd_gen.L2Val / sgd_gen.Net);
                    g_loss_f = loss_reader.Memory[0];
                    loss_reader.Memory[0] = 0;

                    //Update generator
                    sgd_gen.Update(0);
                    generator_back.UpdateLayers(sgd_gen);
                    generator_back.ResetLayerErrors();
                    discriminator.ResetLayerErrors();
                }
            }

            discriminator.Save($@"{dir}\network_final.bin");
            Console.WriteLine("DONE.");
        }
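Both GAN examples draw their latent vectors with `r_latent.NextGaussian(0, 1)`. A standard normal sampler can be built from two uniforms via the Box-Muller transform; a minimal sketch with the same shape as NextGaussian(mean, stdDev) (assumed; the library's actual method may differ):

using System;

// Box-Muller sketch: turn two uniform samples into one standard normal,
// then shift and scale to the requested mean and standard deviation.
public static double NextGaussian(Random rng, double mean, double stdDev)
{
    double u1 = 1.0 - rng.NextDouble(); // in (0, 1], avoids Log(0)
    double u2 = rng.NextDouble();
    double standardNormal =
        Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Cos(2.0 * Math.PI * u2);
    return mean + stdDev * standardNormal;
}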