Esempio n. 1
0
        /// <summary>
        /// Restores a Keras model: architecture from a JSON file, trained weights
        /// from a separate weights file, then compiles it ready for use.
        /// </summary>
        /// <param name="modelPath">Path to the JSON architecture description.</param>
        /// <param name="weightsPath">Path to the saved weights file.</param>
        /// <returns>The compiled, weight-loaded model.</returns>
        private BaseModel LoadModel(string modelPath, string weightsPath)
        {
            // Rebuild the network topology from its serialized JSON description.
            var architectureJson = File.ReadAllText(modelPath);
            var restoredModel    = Model.ModelFromJson(architectureJson);

            // Restore the trained parameters, then compile so the model is usable.
            restoredModel.LoadWeight(weightsPath);
            restoredModel.Compile(loss: "categorical_crossentropy", optimizer: "adam", metrics: new string[] { "accuracy" });

            return restoredModel;
        }
Esempio n. 2
0
        /// <summary>
        /// Builds the three GAN models — generator, discriminator and the combined
        /// adversarial model — and compiles each with its own optimizer.
        /// Results are stored in the <c>generatorModel</c>, <c>discriminatorModel</c>
        /// and <c>ganModel</c> fields.
        /// </summary>
        public void CreateModels()
        {
            // Separate optimizers: the discriminator trains with a higher learning
            // rate than the combined GAN stack.
            var discOptimizer     = new Adam(lr: 0.01f);
            var adversarialAdam   = new Adam(lr: 0.005f);

            // ---- Generator: widening dense stack, sigmoid output ----
            var generator      = new Sequential();
            var generatorInput = new Input(new Shape(ImageData.TotalInputShape));

            generator.Add(generatorInput);
            generator.Add(new Dense(128, activation: "relu"));
            generator.Add(new Dropout(0.3f));
            generator.Add(new BatchNormalization(momentum: 0.9f));
            foreach (var width in new[] { 256, 512, 2048 })
            {
                generator.Add(new Dense(width, activation: "relu"));
                generator.Add(new BatchNormalization(momentum: 0.9f));
            }
            generator.Add(new Dense(ImageData.OutputShape + ImageData.ManualInputShape, activation: "sigmoid"));

            generatorModel = generator;
            generatorModel.Compile(optimizer: adversarialAdam, loss: "mse", metrics: new string[] { "accuracy" });

            // ---- Discriminator: narrowing dense stack with heavy dropout, single sigmoid score ----
            var discriminator = new Sequential();

            discriminator.Add(new Dense(2048, activation: "relu", input_shape: new Shape(ImageData.OutputShape + ImageData.ManualInputShape)));
            discriminator.Add(new Dropout(0.4f));
            foreach (var width in new[] { 512, 128, 32 })
            {
                discriminator.Add(new Dense(width, activation: "relu"));
                discriminator.Add(new Dropout(0.4f));
            }
            discriminator.Add(new Dense(1, activation: "sigmoid"));

            discriminatorModel = discriminator;
            discriminatorModel.Compile(optimizer: discOptimizer, loss: "binary_crossentropy", metrics: new string[] { "accuracy" });

            // ---- Adversarial model: generator feeding straight into the discriminator ----
            var adversarial = new Sequential();

            adversarial.Add(generatorModel.ToLayer());
            adversarial.Add(discriminatorModel.ToLayer());

            ganModel = adversarial;
            ganModel.Compile(optimizer: adversarialAdam, loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
        }
Esempio n. 3
0
        /// <summary>
        /// Loads the sudoku dataset, trains the given model on the training split and
        /// evaluates it on a handful of test puzzles.
        /// </summary>
        /// <param name="model">An uncompiled Keras model; compiled and fitted in place.</param>
        /// <returns>The trained model and the test accuracy as a percentage (0–100).</returns>
        public static (BaseModel model, double accuracy) TrainAndTest(BaseModel model)
        {
            // Global parameters
            string datasetPath = @"Sudoku.NeuralNetwork\Dataset\sudoku.csv.gz";
            int    numSudokus  = 1000;

            // ML parameters
            double testPercent  = 0.2;
            float  learningRate = .001F;
            int    batchSize    = 32;
            int    epochs       = 2;
            int    testCount    = 1;   // number of test puzzles actually evaluated below

            Console.WriteLine("Initialize dataset");
            var(sPuzzles, sSols) = DataSetHelper.ParseCSV(datasetPath, numSudokus);
            var(rawPuzzlesTrain, rawPuzzlesTest) = DataSetHelper.SplitDataSet(sPuzzles, testPercent);
            var(rawSolsTrain, rawSolsTest)       = DataSetHelper.SplitDataSet(sSols, testPercent);

            Console.WriteLine("Preprocess data");
            var sPuzzlesTrain = DataSetHelper.PreprocessSudokus(rawPuzzlesTrain);
            var sSolsTrain    = DataSetHelper.PreprocessSudokus(rawSolsTrain);
            var sPuzzlesTest  = DataSetHelper.PreprocessSudokus(rawPuzzlesTest);
            var sSolsTest     = DataSetHelper.PreprocessSudokus(rawSolsTest);

            // Add optimizer
            var adam = new Keras.Optimizers.Adam(learningRate);

            model.Compile(loss: "sparse_categorical_crossentropy", optimizer: adam);

            Console.WriteLine("Train model");
            model.Fit(sPuzzlesTrain, sSolsTrain, batch_size: batchSize, epochs: epochs);

            // Test model on the first `testCount` test puzzles.
            int correct = 0;

            for (int i = 0; i < testCount; i++)
            {
                Console.WriteLine("Testing " + i);

                // Predict result
                var prediction = Solve(sPuzzlesTest[i], model);

                // Convert prediction and reference solution to sudoku grids.
                var sPredict = new Core.Sudoku()
                {
                    Cells = prediction.flatten().astype(np.int32).GetData <int>().ToList()
                };
                // Undo the preprocessing scaling ((v + 0.5) * 9) to recover digits.
                var sSol = new Core.Sudoku()
                {
                    Cells = ((sSolsTest[i] + 0.5) * 9).flatten().astype(np.int32).GetData <int>().ToList()
                };

                // Compare sudokus cell by cell; stop at the first mismatch.
                var same = true;
                for (int j = 0; j < 9 && same; j++)
                {
                    for (int k = 0; k < 9 && same; k++)
                    {
                        if (sPredict.GetCell(j, k) != sSol.GetCell(j, k))
                        {
                            same = false;
                        }
                    }
                }
                Console.WriteLine("Prediction : \n" + sPredict);
                Console.WriteLine("Solution : \n" + sSol);

                if (same)
                {
                    correct += 1;
                }
            }

            // BUG FIX: the original computed (correct / sPuzzlesTest.size) * 100 with
            // integer division — truncating to 0 for any partial success — and divided
            // by the full test-set size even though only `testCount` puzzles were
            // evaluated. Use floating-point division over the puzzles actually tested.
            var accuracy = ((double)correct / testCount) * 100;

            // Return
            return(model, accuracy);
        }
Esempio n. 4
0
        /// <summary>
        /// Compiles the model, flattens all moving datasets into one training matrix
        /// and then trains forever, saving the model after every Fit() round.
        /// </summary>
        /// <param name="cancellationToken">
        /// Optional token to stop the training loop; the default (none) preserves the
        /// original run-forever behavior, so existing callers are unaffected.
        /// </param>
        /// <remarks>
        /// NOTE(review): kept <c>async void</c> to preserve the public signature,
        /// but callers cannot await or observe faults — consider <c>async Task</c>.
        /// Exceptions are reported to the UI via the Dispatcher, as before.
        /// </remarks>
        public async void TrainMovingData(System.Threading.CancellationToken cancellationToken = default)
        {
            try {
                // Compile
                var optimizer = new Adam(lr: 0.001f);
                model.Compile(optimizer: optimizer, loss: "mse", metrics: new string[] { "accuracy" });

                // Collect every dataset chunk and count total rows up front so the
                // combined matrices can be allocated exactly once.
                MovingDataset[] movingTrainDatasets = LoadMovingDatasets();
                int             dataCount           = 0;

                List <float[, ]> inputList  = new List <float[, ]>();
                List <float[, ]> outputList = new List <float[, ]>();
                for (int i = 0; i < movingTrainDatasets.Length; ++i)
                {
                    TrainData trainData = movingTrainDatasets[i].GetTrainData();

                    inputList.Add(trainData.inputs);
                    outputList.Add(trainData.outputs);
                    dataCount += trainData.inputs.GetLength(0);
                }
                float[,] inputs  = new float[dataCount, MovingDataset.InputFloatCount];
                float[,] outputs = new float[dataCount, MovingDataset.OutputFloatCount];

                // 2-D float arrays are contiguous in row-major order, so each row's
                // first InputFloatCount values can be block-copied instead of the
                // original element-by-element nested loops.
                int inputIndex = 0;
                foreach (float[,] input in inputList)
                {
                    int sourceWidth = input.GetLength(1);
                    for (int row = 0; row < input.GetLength(0); ++row, ++inputIndex)
                    {
                        Array.Copy(input, row * sourceWidth, inputs, inputIndex * MovingDataset.InputFloatCount, MovingDataset.InputFloatCount);
                    }
                }

                int outputIndex = 0;
                foreach (float[,] output in outputList)
                {
                    int sourceWidth = output.GetLength(1);
                    for (int row = 0; row < output.GetLength(0); ++row, ++outputIndex)
                    {
                        Array.Copy(output, row * sourceWidth, outputs, outputIndex * MovingDataset.OutputFloatCount, MovingDataset.OutputFloatCount);
                    }
                }

                // Train until cancelled (forever by default), persisting after each round.
                while (!cancellationToken.IsCancellationRequested)
                {
                    model.Fit(inputs, outputs, batch_size: 100, epochs: 100, verbose: 1);

                    SaveModel();

                    await Task.Delay(10, cancellationToken);
                }
            } catch (OperationCanceledException) {
                // Normal shutdown path: the token was cancelled during Task.Delay.
            } catch (Exception ex) {
                MainWindow.Instance.Dispatcher.BeginInvoke(new Action(() => {
                    MainWindow.Instance.DebugTextBox.Text = ex.ToString();
                }));
            }
        }