예제 #1
0
        static void Main(string[] args)
        {
            // Parse the command line into an Options instance.
            var result = Parser.Default.ParseArguments<Options>(args);

            if (result is Parsed<Options> parsed)
            {
                SetupLogging();

                var options = parsed.Value;

                var logger = LogManager.GetLogger("Trainer");
                logger.Info("Generating training data ...");

                // Generation can be skipped when the data sets already exist.
                if (!options.SkipGeneration)
                {
                    var data = new TrainingData();
                    data.Generate(options.TrainingSet);
                    data.Generate(options.TestSet);
                }

                var trainer = new NetworkTrainer();
                trainer.Run(options.Model, options.TrainingSet, options.TestSet);
            }
            else
            {
                // Parsing failed: print the auto-generated usage text.
                Console.WriteLine(HelpText.AutoBuild(result));
            }
        }
예제 #2
0
        /// <summary>
        /// Trains a 1-2-1 tanh network on a sinus training set until the error or
        /// its per-iteration improvement drops below a threshold, then measures
        /// generalization error on a fresh 20-sample set.
        /// </summary>
        public void TestTanhLearningOnSinus()
        {
            NNetwork network = NNetwork.HyperbolicNetwork(new int[] { 1, 2, 1 });

            network.RandomizeWeights(1, 2);
            NetworkTrainer trainer = new NetworkTrainer(network);

            double[][] inputs  = SinusTrainSet()[0];
            double[][] outputs = SinusTrainSet()[1];
            double     error   = 1;
            double     delta   = 1;
            int        j       = 0;

            // NOTE(review): '&&' binds tighter than '||', so this condition reads
            // "(error > 0.01 && delta > 0.000001) || j == 1" — the "|| j == 1"
            // clause only forces an extra pass when j happens to equal 1;
            // confirm the precedence is intended.
            for (; error > 0.01 && !(delta <= 0.000001) || j == 1; j++)
            {
                trainer.TrainClassification(inputs, outputs);
                double new_cost = trainer.GetError();
                // delta tracks how much this pass improved the error.
                delta = error - new_cost;
                error = new_cost;
            }
            double[][] input_test  = SinusTrainSet(20)[0];
            double[][] output_test = SinusTrainSet(20)[1];
            // Disable learning so the next pass only measures error.
            trainer.IsLearning = false;
            trainer.TrainClassification(input_test, output_test);
            error = trainer.GetError();
            Assert.Less(error, 0.53);
        }
예제 #3
0
        static void Main(string[] args)
        {
            // Earlier experiment with reciprocal-valued targets, kept disabled.
            //IList<Tuple<double[], double[]>> trainset = new List<Tuple<double[], double[]>>
            //{
            //    Tuple.Create<double[], double[]>(new double[]{ 97, 98, 97, 98, 97, 98, 97, 98 }, new double[]{ 1.0 / 97, 1.0 / 98 }),
            //    Tuple.Create<double[], double[]>(new double[]{ 98, 98, 98, 98, 98, 98, 98, 98 }, new double[]{ 1.0 / 98, 1.0 / 98 }),
            //    Tuple.Create<double[], double[]>(new double[]{ 95, 101, 95, 101, 95, 101, 95, 101 }, new double[]{ 1.0 / 95, 1.0 / 101 }),
            //    Tuple.Create<double[], double[]>(new double[]{ 120, 121, 120, 121, 120, 121, 120, 121 }, new double[]{ 1.0 / 120, 1.0 / 121 })
            //};

            // Binary-pattern samples: each 8-element input maps to a 2-element label.
            IList <Tuple <double[], double[]> > trainset = new List <Tuple <double[], double[]> >
            {
                Tuple.Create <double[], double[]>(new double[] { 1, 0, 1, 0, 1, 0, 1, 0 }, new double[] { 1, 0 }),
                Tuple.Create <double[], double[]>(new double[] { 0, 1, 0, 1, 0, 1, 0, 1 }, new double[] { 0, 1 }),
                Tuple.Create <double[], double[]>(new double[] { 0, 0, 0, 0, 0, 0, 0, 0 }, new double[] { 0, 0 }),
                Tuple.Create <double[], double[]>(new double[] { 1, 1, 1, 1, 1, 1, 1, 1 }, new double[] { 1, 1 })
            };

            NeuralNetwork net = NeuralNetwork.GetInstance(inputs: null);

            // NOTE(review): Train is called WITHOUT the training set (the argument
            // is commented out) while Test does receive it — confirm this is the
            // intended behavior.
            NetworkTrainer.Train(net /*, trainset*/);
            NetworkTrainer.Test(net, trainset);

            // Keep the console window open until a key is pressed.
            Console.ReadKey();
        }
예제 #4
0
        /// <summary>
        /// Loads the data set from resources/ann.xml, builds an 8-18-8
        /// back-propagation network and trains it. The error history is formatted
        /// into a local array (see the review note at the end).
        /// </summary>
        public ImageCompress()
        {
            // Layer sizes: 8 inputs, 18 hidden, 8 outputs.
            int[] layersizes = new int[3] {
                8, 18, 8
            };
            // One activation function per layer (input layer uses None).
            ActivationFunction[] activFunctions = new ActivationFunction[3] {
                ActivationFunction.None, ActivationFunction.Sigmoid, ActivationFunction.Linear
            };


            XmlDocument xdoc = new XmlDocument();

            xdoc.Load(Server.MapPath("resources/ann.xml"));

            // The data set is stored in the first child element of the XML root.
            ds = new DataSet();
            ds.Load((XmlElement)xdoc.DocumentElement.ChildNodes[0]);

            bpnetwork = new BackPropNetwork(layersizes, activFunctions);
            nt        = new NetworkTrainer(bpnetwork, ds);

            // Training presumably stops at maxError or after maxiterations,
            // whichever comes first — confirm against NetworkTrainer.
            nt.maxError      = 0.00001;
            nt.maxiterations = 10000;
            nt.nudgewindow   = 500;
            nt.traininrate   = 0.1;
            nt.TrainDataset();

            // save error
            // NOTE(review): 'filedata' is built here but never written anywhere —
            // either persist it (cf. the variant that calls File.WriteAllLines)
            // or drop this loop.
            double[] err      = nt.geteHistory();
            string[] filedata = new string[err.Length];

            for (int i = 0; i < err.Length; i++)
            {
                filedata[i] = i.ToString() + " " + err[i].ToString();
            }
        }
예제 #5
0
        /// <summary>
        /// Builds the currency exchange-rate table (chaos-orb equivalents), wraps
        /// it in a ConversionTable and starts network training.
        /// </summary>
        private void StartNetworks()
        {
            //Based off of http://www.poeex.info/index/index/table/1
            var rates = new Dictionary <CurrencyType, float>()
            {
                { CurrencyType.ChromaticOrb, 1f / 13f },
                { CurrencyType.OrbOfAlteration, 1f / 13f },
                { CurrencyType.JewellersOrb, 1f / 7f },
                { CurrencyType.OrbOfChance, 1f / 5f },
                { CurrencyType.CartographersChisel, 1f / 4f }, // consistency fix: was "1 / 4f" (same value, matches the other entries)
                { CurrencyType.OrbOfFusing, 1f / 2f },
                { CurrencyType.OrbOfAlchemy, 1f / 3.5f },
                { CurrencyType.OrbOfScouring, 1f / 1.8f },
                { CurrencyType.BlessedOrb, 1f / 1.3f },
                { CurrencyType.OrbOfRegret, 1f },
                { CurrencyType.RegalOrb, 1.3f },
                { CurrencyType.GemcuttersPrism, 1.8f },
                { CurrencyType.DivineOrb, 17f },
                { CurrencyType.ExaltedOrb, 50f },
                { CurrencyType.VaalOrb, 1f },
            };
            var conversionTable = new ConversionTable(rates);

            trainer = new NetworkTrainer(conversionTable);
            trainer.StartTraining();
        }
예제 #6
0
        static void Step5()
        {
            Console.WriteLine("STEP 5: Train neural network...");

            // Train the network from the files produced by the earlier steps.
            var trainer = new NetworkTrainer();
            trainer.Train(DataFilesInfoGetter.NetworkFile, DataFilesInfoGetter.NormalizedTrainingFile);
        }
예제 #7
0
        /// <summary>
        /// Smoke-tests NetworkTrainer.TrainPrediction on a simple increasing series.
        /// NOTE(review): the test has no assertions yet (see the todo below) — it
        /// only verifies that training does not throw.
        /// </summary>
        public void TrainPrediction()
        {
            NNetwork       network = NNetwork.SigmoidNetwork(new int[] { 5, 2, 2 });
            NetworkTrainer trainer = new NetworkTrainer(network);

            // Monotonically increasing series from 0 to 1 in steps of 0.1.
            double[] train_set = new double[] { 0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1 };
            trainer.TrainPrediction(train_set);
            //todo
        }
예제 #8
0
        static void Main(string[] args)
        {
            // Demo file write (unrelated to the training flow below).
            string[] lines = { "First line", "Second line", "Third line" };
            System.IO.File.WriteAllLines(@"C:\Users\anxel\Downloads\NeuralNetwork\NeuralNetwork\WriteLines.txt", lines);
            // Training Data
            XmlDocument doc = new XmlDocument();

            doc.Load(@"C:\Users\anxel\Downloads\NeuralNetwork\NeuralNetwork\simpleData.xml");

            DataSet ds = new DataSet();

            // The data set lives in the first child element of the document root.
            ds.Load((XmlElement)doc.DocumentElement.ChildNodes[0]);

            // Network to train: 25 inputs, 35 hidden, 4 outputs.
            int[] layerSizes = new int[3] {
                25, 35, 4
            };
            TransferFunction[] tFuncs = new TransferFunction[3] {
                TransferFunction.None,
                TransferFunction.Sigmoid,
                TransferFunction.Linear
            };

            BackPropagationNetwork bpn = new BackPropagationNetwork(layerSizes, tFuncs);

            // Network trainer!
            NetworkTrainer nt = new NetworkTrainer(bpn, ds);

            // Stop at 0.001 error or 100k iterations — presumed from the field
            // names; confirm against NetworkTrainer.
            nt.maxError = 0.001; nt.maxIterations = 100000;

            nt.nudge_window = 500;

            // Train
            Console.WriteLine("Training...");
            nt.TrainDataSet();
            Console.WriteLine("Done!");

            // Save the network
            // NOTE(review): this overwrites simpleData.xml — the very file the
            // training data was loaded from above. Confirm this is intentional;
            // a separate output file seems more likely.
            nt.network.Save(@"C:\Users\anxel\Downloads\NeuralNetwork\NeuralNetwork\simpleData.xml");

            // Save the error history, one "<iteration> <error>" line per entry.
            double[] error    = nt.GetErrorHistory();
            string[] filedata = new string[error.Length];
            for (int i = 0; i < error.Length; i++)
            {
                filedata[i] = i.ToString() + " " + error[i].ToString();
            }

            File.WriteAllLines(@"C:\Users\anxel\Downloads\NeuralNetwork\NeuralNetwork\simple_errors.txt", filedata);

            // End of program
            Console.WriteLine("\n\nPress Enter...");
            Console.ReadLine();
        }
예제 #9
0
 /// <summary>
 /// Click handler: builds a 4-input network (two hidden layers of 2, one output)
 /// and its trainer. NOTE(review): the SGD call and its data are commented out,
 /// so this handler currently constructs the objects and does nothing else.
 /// </summary>
 private void btnRunTraining_Click(object sender, EventArgs e)
 {
     Network        nn = new Network(4, new[] { 2, 2 }, 1);
     NetworkTrainer nt = new NetworkTrainer(nn);
     //Tuple<double[],double[]>[] testData = new Tuple<double[], double[]>[](new double[]{1,2},new double[]{2,3});
     //Tuple<double[], double[]>[] trainingData = new Tuple<double[], double[]>[](new double[] { 3, 4 }, new double[] { 3, 4 });
     //foreach (Tuple<int, double?> results in nt.StochasticGradientDescent(10000, trainingData, 1000, 100, .05, testData)) ;
     //{
     //
     //}
 }
예제 #10
0
        private static void Main(string[] args)
        {
            // Build the network plus its trainer and tester.
            var network        = new Network();
            var networkTrainer = new NetworkTrainer();
            var networkTester  = new NetworkTester();

            // Train first, then evaluate the trained network.
            networkTrainer.Train(network);
            networkTester.Test(network);

            // Keep the console window open until a key is pressed.
            Console.ReadKey();
        }
예제 #11
0
        /// <summary>
        /// Form constructor: prepares the drawing surface and pen, then builds a
        /// 784-input network (hidden layers of 100 and 50, 10 outputs).
        /// </summary>
        public Form1()
        {
            InitializeComponent();
            // NOTE(review): CreateGraphics() surfaces are transient — anything
            // drawn is lost on repaint; consider drawing in the Paint event.
            g = panel1.CreateGraphics();
            g.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.AntiAlias;
            pen             = new Pen(Color.Black, 5);
            pen.StartCap    = pen.EndCap = System.Drawing.Drawing2D.LineCap.Round;

            // Creates a network with a 784-neuron input layer, 2 hidden layers, 10 outputs.
            // NOTE(review): both locals go out of scope immediately — store them
            // in fields if the form is meant to use them later.
            Network        _network        = new Network(784, new[] { 100, 50 }, 10);
            NetworkTrainer _networkTrainer = new NetworkTrainer(_network);
        }
예제 #12
0
        /// <summary>
        /// Runs stochastic gradient descent on MNIST data and logs the error rate
        /// reported after each epoch.
        /// </summary>
        /// <param name="network">the network to train</param>
        private static void Train(Network network)
        {
            // 60,000 MNIST training samples and 5,000 held-out testing samples.
            var trainingData = GetMNISTData(true, 60000);
            var testingData  = GetMNISTData(false, 5000);

            var trainer = new NetworkTrainer(network);

            // Stream per-epoch results as training progresses.
            foreach (var epochResult in trainer.StochasticGradientDescent(10000, trainingData, 1000, 100, .05, testingData))
            {
                Console.WriteLine($"Epoch {epochResult.Item1}: {epochResult.Item2}% error");
            }
        }
예제 #13
0
        /// <summary>
        /// Verifies that training rejects samples whose input or output width does
        /// not match the network's layer sizes.
        /// </summary>
        public void DimensionTestCheck()
        {
            // The network expects 2 inputs and produces 3 outputs.
            NNetwork       network = NNetwork.SigmoidNetwork(new int[] { 2, 4, 3 });
            NetworkTrainer trainer = new NetworkTrainer(network);

            double[][] badInput   = new double[1][] { new double[3] };
            double[][] goodInput  = new double[1][] { new double[2] };
            double[][] badOutput  = new double[1][] { new double[4] };
            double[][] goodOutput = new double[1][] { new double[3] };

            // A 3-wide input must be rejected by a 2-input network.
            Assert.Throws(typeof(IncorrectInputDimensionException),
                          () => trainer.TrainClassification(badInput, goodOutput));
            // A 4-wide output must be rejected by a 3-output network.
            Assert.Throws(typeof(IncorrectOutputDimensionException),
                          () => trainer.TrainClassification(goodInput, badOutput));
        }
예제 #14
0
        /// <summary>
        /// Verifies that GetError throws before any training has happened, is
        /// non-zero after the first pass, and changes between successive passes.
        /// </summary>
        public void TestCostFunctionAccumulation()
        {
            NNetwork       network = NNetwork.SigmoidNetwork(new int[] { 2, 4, 3 });
            NetworkTrainer trainer = new NetworkTrainer(network);

            double[] train_set = new[] { 0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1 };
            // No error information is available before the first training call.
            Assert.Throws(typeof(NoErrorInfoYetException), () => trainer.GetError());
            double error;

            trainer.TrainPrediction(train_set);
            error = trainer.GetError();
            // NOTE(review): AreNotEqual conventionally takes (expected, actual);
            // the order here is swapped — harmless for pass/fail, but the failure
            // message reads backwards.
            Assert.AreNotEqual(error, 0);
            trainer.TrainPrediction(train_set);
            // A second pass must change the accumulated error.
            Assert.AreNotEqual(error, trainer.GetError());
        }
예제 #15
0
        /// <summary>
        /// Builds a back-propagation network from the configured layers, trains it
        /// on a background task and shows a modal progress dialog while training
        /// runs. Progress is reported through trainer/administration events.
        /// </summary>
        public async Task TrainNetworkAsync()
        {
            int[] layerSize = CreateLayersSize();
            TransferFunction[] tfunc = AddTransferFunctionToLayers();

            // Fixed: 'bpn' was declared null and immediately null-checked, so the
            // branch always executed — construct the network directly instead.
            BackPropagationNetwork bpn = new BackPropagationNetwork(layerSize, tfunc);
            NetworkTrainer nt;

            await FixInputs();

            nt = new NetworkTrainer(bpn, _dataSet);

            // Fire-and-forget on purpose: the progress dialog below observes
            // training via events. The discard makes the unawaited task explicit
            // (silences CS4014); exceptions inside the lambda remain unobserved —
            // consider surfacing them through NetworkTrained.
            _ = Task.Run(() =>
            {
                nt.MaxError      = MaxError;
                nt.MaxIterations = MaxIterations;
                nt.NudgeWindow   = NudgeWindow;

                nt.TrainDataSet();

                // Persist the trained weights.
                nt.Network.Save(@"tezine.xml");

                // Dump the per-iteration error history, one "<i> <error>" line each.
                double[] error    = nt.GetErrorHistory();
                string[] filedata = new string[error.Length];
                for (int i = 0; i < error.Length; i++)
                {
                    filedata[i] = i.ToString() + " " + error[i].ToString();
                }

                File.WriteAllLines(@"greske.txt", filedata);
                NetworkTrained?.Invoke(this, "Završeno treniranje.");
            });

            dynamic settings = SetSettingsForProgressReportPopup();

            TrainingNetworkReport.Status        = "Treniranje mreže...";
            TrainingNetworkReport.Progress      = 0;
            TrainingNetworkReport.MaxIterations = MaxIterations;
            TrainingNetworkReport.Error         = 0;
            WindowManager window = new WindowManager();

            window.ShowDialog(new TrainingNetworkProgresBarViewModel(ref _trainingNetworkReport, settings, this, nt));
        }
예제 #16
0
        //static void FuckWithWeights(Network network)
        //{
        //    network.HiddenLayers[0].Neurons[0].Bias = -1.74976547;
        //    network.HiddenLayers[0].Neurons[0].UpstreamDendrites[0].Weight = 0.22117967;
        //    network.HiddenLayers[0].Neurons[0].UpstreamDendrites[1].Weight = -1.07004333;

        //    network.HiddenLayers[0].Neurons[1].Bias = 0.3426804;
        //    network.HiddenLayers[0].Neurons[1].UpstreamDendrites[0].Weight = -0.18949583;
        //    network.HiddenLayers[0].Neurons[1].UpstreamDendrites[1].Weight = 0.25500144;

        //    network.HiddenLayers[0].Neurons[2].Bias = 1.1530358;
        //    network.HiddenLayers[0].Neurons[2].UpstreamDendrites[0].Weight = -0.45802699;
        //    network.HiddenLayers[0].Neurons[2].UpstreamDendrites[1].Weight = 0.43516349;

        //    network.HiddenLayers[0].Neurons[3].Bias = -0.25243604;
        //    network.HiddenLayers[0].Neurons[3].UpstreamDendrites[0].Weight = -0.58359505;
        //    network.HiddenLayers[0].Neurons[3].UpstreamDendrites[1].Weight = 0.81684707;

        //    network.HiddenLayers[0].Neurons[4].Bias = 0.98132079;
        //    network.HiddenLayers[0].Neurons[4].UpstreamDendrites[0].Weight = 0.67272081;
        //    network.HiddenLayers[0].Neurons[4].UpstreamDendrites[1].Weight = -0.10441114;

        //    network.OutputLayer.Neurons[0].Bias = 0.51421884;
        //    network.OutputLayer.Neurons[0].UpstreamDendrites[0].Weight = -0.53128038;
        //    network.OutputLayer.Neurons[0].UpstreamDendrites[1].Weight = 1.02973269;
        //    network.OutputLayer.Neurons[0].UpstreamDendrites[2].Weight = -0.43813562;
        //    network.OutputLayer.Neurons[0].UpstreamDendrites[3].Weight = -1.11831825;
        //    network.OutputLayer.Neurons[0].UpstreamDendrites[4].Weight = 1.61898166;
        //}

        static void Main(string[] args)
        {
            // What I cannot create, I do not understand.
            // ~Richard P. Feynman

            // Build the MNIST data once; it is reused across iterations.
            ITrainingDataBuilder trainingDataBuilder = new MNISTTrainingDataBuilder();

            trainingDataBuilder.BuildTrainingData();

            double    totalAccuracy      = 0.0;
            const int NumberOfIterations = 1;

            DateTime start = DateTime.Now;

            // Fixed: create one Random and reuse it; instantiating a new Random
            // per loop iteration can yield identical time-based seeds when
            // iterations run quickly.
            Random rand = new Random();

            for (int c = 0; c < NumberOfIterations; c++)
            {
                // 784-input, 10-output network with a 30-neuron hidden layer.
                Network network = Network.BuildNetwork(
                    rand,
                    new Math.CostFunctions.CrossEntropyCostFunction(),
                    new Math.RegularizationFunctions.L2Normalization(.1),
                    Common.WeightIntializerType.RandomGaussianWithNeuronCount,
                    new DropoutLayerOptions(0),
                    784, 10, 30);

                NetworkTrainer networkTrainer = new NetworkTrainer();
                networkTrainer.Train(network,
                                     trainingDataBuilder,
                                     .5, 30, 10,
                                     -1,
                                     OnLearningProgress,
                                     OnValidationDataUpdate);

                // Accumulate percentage accuracy on the held-out test data.
                totalAccuracy += trainingDataBuilder.GradeResults(network, trainingDataBuilder.TestData) * 100.0;
            }

            // Fixed typo in the output message ("Accurancy" -> "Accuracy");
            // redundant trailing 'return;' removed.
            Console.WriteLine($"Accuracy: {(totalAccuracy / (NumberOfIterations * 1.0)).ToString("000.00")}% in {(DateTime.Now - start).TotalSeconds} seconds.");
        }
예제 #17
0
        /// <summary>
        /// Loads previously saved MNIST weights when present, otherwise builds a
        /// fresh 784-100-50-10 network, then runs stochastic gradient descent on
        /// the MNIST training data and stores the resulting error.
        /// </summary>
        public ParkingSpaceClassifier()
        {
            // Earlier bitmap-based training experiment, kept disabled for reference.
            //int dataWidth = 4;
            //int dataHeight = 4;
            //int inputSize = dataWidth * dataHeight;
            //Net = new Network(inputSize, new[] { 20, 10 }, 1);

            //NetworkTrainer trainer = new NetworkTrainer(Net);
            ////--------------------------------------------------
            //// Training data
            //List<Tuple<Bitmap, Bitmap>> training = new List<Tuple<Bitmap, Bitmap>>();
            //int trainingSize = 1000;
            //for (int i = 0; i < trainingSize;i++)
            //	training.Add(GenerateLabeled(dataWidth, dataHeight));
            ////--------------------------------------------------
            //// Test data
            //List<Tuple<Bitmap, Bitmap>> test = new List<Tuple<Bitmap, Bitmap>>();
            //int testSize = 200;
            //for (int i = 0; i < testSize; i++)
            //	test.Add(GenerateLabeled(dataWidth, dataHeight));

            //Error = trainer.StochasticGradientDescent(trainingSize, training.Select(t => Flatten(t)).ToArray(), 1000, 100, .05, test.Select(t => Flatten(t)).ToArray());

            // Reuse saved weights when available to skip retraining from scratch.
            if (File.Exists("./data/MNIST_weights.dat"))
            {
                Load();
            }
            else
            {
                Net = new Network(784, new[] { 100, 50 }, 10);
            }

            Iterations = 1000;
            NetworkTrainer trainer = new NetworkTrainer(Net);

            Tuple <double[], double[]>[] _testData     = GetTestData();
            Tuple <double[], double[]>[] _trainingData = GetTrainingData();
            // Presumed argument meaning (by analogy with the commented call above):
            // sample count, training data, iterations, batch size, learning rate,
            // test data — confirm against StochasticGradientDescent's signature.
            Error = trainer.StochasticGradientDescent(10000, _trainingData, Iterations, 100, .05, _testData);
        }
예제 #18
0
        /// <summary>
        /// Runs one forward pass through a 2-2-1-1 perceptron with deterministic
        /// parameters (every weight 0.1, every bias 1.0), back-propagates an error
        /// signal of 42.0, and checks the derivatives accumulated on the neuron
        /// that feeds the output neuron.
        /// </summary>
        public void TestBack()
        {
            // Fixed parameter generators make the expected derivatives reproducible.
            var parameterGenerator = new ActionParameterGenerator(() => 0.1, () => 1.0);
            var neuronFactor       = new NeuronFactory(parameterGenerator);
            var synapseFactory     = new SynapseFactory(parameterGenerator);
            var network            = NetworkFactory.CreateMultilayerPerceptron(new[] { 2, 2, 1, 1 }, ActivationFunction.Sigmoid,
                                                                               ActivationFunction.Identity, null, neuronFactor, synapseFactory);
            var networkTrainer = new NetworkTrainer(network);
            var x = new Vector(3, 5);

            // Forward pass, then back-propagate the error signal.
            network.Compute(x);
            networkTrainer.Back(42.0);

            // The neuron upstream of the single output neuron.
            var n = network.OutputLayer[0].Inputs[0].Source;

            Console.WriteLine(n.InputDerivative);
            Console.WriteLine(n.OutputDerivative);
            Console.WriteLine(n.InputDerivativeCount);

            // Expected values for this fixed weight/bias configuration.
            Assert.IsTrue(Math.Abs(-3.987764 - n.InputDerivative) < 0.01);
            Assert.IsTrue(Math.Abs(-41.009155 - n.OutputDerivative) < 0.01);
            Assert.AreEqual(1, n.InputDerivativeCount);
        }
예제 #19
0
        /// <summary>
        /// Loads the data set from resources/ann.xml, builds a 10-layer
        /// back-propagation network (1-10-9-8-7-5-4-3-2-1) and trains it. The
        /// error history is formatted into a local array (see the review note).
        /// </summary>
        public CompressText()
        {
            int[] layersizes = new int[10] {
                1, 10, 9, 8, 7, 5, 4, 3, 2, 1
            };
            // One activation function per layer (input layer uses None).
            ActivationFunction[] activFunctions = new ActivationFunction[10] {
                ActivationFunction.None, ActivationFunction.Gaussian, ActivationFunction.Sigmoid, ActivationFunction.Sigmoid, ActivationFunction.Sigmoid, ActivationFunction.Sigmoid, ActivationFunction.Sigmoid, ActivationFunction.Sigmoid, ActivationFunction.Sigmoid,
                ActivationFunction.Linear
            };


            XmlDocument xdoc = new XmlDocument();

            xdoc.Load(Path.Combine(HttpRuntime.AppDomainAppPath, "resources/ann.xml"));

            // The data set is stored in the first child element of the XML root.
            ds = new DataSet();
            ds.Load((XmlElement)xdoc.DocumentElement.ChildNodes[0]);


            bpnetwork = new BackPropNetwork(layersizes, activFunctions);
            nt        = new NetworkTrainer(bpnetwork, ds);

            // Training presumably stops at maxError or after maxiterations,
            // whichever comes first — confirm against NetworkTrainer.
            nt.maxError      = 0.1;
            nt.maxiterations = 10000;
            nt.traininrate   = 0.1;
            nt.TrainDataset();

            // save error
            // NOTE(review): 'filedata' is built here but never written anywhere —
            // either persist it or drop this loop.
            double[] err      = nt.geteHistory();
            string[] filedata = new string[err.Length];

            for (int i = 0; i < err.Length; i++)
            {
                filedata[i] = i.ToString() + " " + err[i].ToString();
            }
        }
예제 #20
0
        /// <summary>
        /// Trains a 2-2-1 sigmoid perceptron on the XOR truth table and verifies
        /// that every input row is reproduced within a 0.01 tolerance.
        /// </summary>
        public void TestTrainXor()
        {
            var pg             = new PositiveUniformParameterGenerator();
            var neuronFactory  = new NeuronFactory(pg);
            var synapseFactory = new SynapseFactory(pg);
            var n = NetworkFactory.CreateMultilayerPerceptron(new[] { 2, 2, 1 }, ActivationFunction.Sigmoid,
                                                              ActivationFunction.Identity, null, neuronFactory, synapseFactory);
            // 0.9 / 0.1 are the trainer's tuning parameters — presumably learning
            // rate and momentum; confirm against NetworkTrainer's constructor.
            var trainer = new NetworkTrainer(n, 0.9, 0.1);

            // The four XOR input rows and their expected labels.
            var examples = new Matrix(new double[, ] {
                { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 }
            });
            var labels = new Vector(0, 1, 1, 0);

            trainer.Train(examples, labels, 1000);

            for (var row = 0; row < labels.Length; row++)
            {
                var input    = examples.GetRow(row);
                var expected = labels[row];
                Console.WriteLine("Actual: {0}, Result: {1}", expected, n.Compute(input));
                Assert.IsTrue(Math.Abs(expected - n.Compute(input)[0]) < 0.01);
            }
        }
예제 #21
0
 /// <summary>
 /// View model for the training-progress popup: copies the popup placement
 /// settings, subscribes to the training events and starts the progress bar.
 /// </summary>
 /// <param name="progress">Shared progress report updated during training.</param>
 /// <param name="settings">Dynamic popup settings (animation, placement, offsets, size).</param>
 /// <param name="administration">Source of the NetworkTrained completion event.</param>
 /// <param name="networkTrainer">Source of the per-iteration IterationCompleted event.</param>
 public TrainingNetworkProgresBarViewModel(ref TrainingNetworkProgressReportModel progress, dynamic settings, AdministrationViewModel administration, NetworkTrainer networkTrainer)
 {
     Progress         = progress;
     PopupAnimation   = settings.PopupAnimation;
     Placement        = settings.Placement;
     HorizontalOffset = settings.HorizontalOffset;
     VerticalOffset   = settings.VerticalOffset;
     Width            = settings.Width;
     Height           = settings.Height;
     administration.NetworkTrained     += Administration_NetworkTrained;
     networkTrainer.IterationCompleted += NetworkTrainer_IterationCompleted;
     RunProgressBar();
 }
예제 #22
0
        private static void ParseArgs(string[] args)
        {
            var structuredArgs = StructuredArguments.FromStrings(args);

            void BatchSimulation()
            {
                var fromSeed    = structuredArgs.AsIntOrDefault("From", 1);
                var toSeed      = structuredArgs.AsIntOrDefault("To", 10);
                var td          = BatchSimulator.LinesFromSeedRange(fromSeed, toSeed);
                var outFilename = $"results_from_{fromSeed}_to_{toSeed}.bin";

                TrainingDataPersistence.SaveToDisk(td, outFilename);
            }

            void BatchSimulationOptimizationBased()
            {
                var td = OptimizationBasedGenerator.BatchGenerateTrainingData(100, 100);

                TrainingDataPersistence.SaveToDisk(td, $"new_results.bin");
            }

            void TrainNetwork()
            {
                var td    = TrainingDataPersistence.LoadFromDisk(structuredArgs.AsString("Filename"));
                var tvd   = MlUtils.Split(td, 0.5f, false);
                var model = NetworkTrainer.TrainNetworkWithData(tvd.Training, tvd.Validation);

                model.Save("model.hdf5");
            }

            void TrainNetworkOrthogonalSampling()
            {
                var td    = TrainingDataPersistence.ParseCsv(structuredArgs.AsString("Filename"));
                var tvd   = MlUtils.Split(td, 0.999f, true);
                var model = NetworkTrainer.TrainNetworkWithData(tvd.Training, tvd.Validation);

                model.Save("modelOrtho.hdf5");
            }

            void JobGeneration()
            {
                JobGenerator.GenerateJobs();
            }

            void MergeResults()
            {
                var mergedData = MergeResultFiles.MergeDataInPath(".", ".bin");

                TrainingDataPersistence.SaveToDisk(mergedData, "merged.bin");
            }

            void PrintData()
            {
                var maxCount = structuredArgs.AsIntOrDefault("NumRows", int.MaxValue);
                var samples  = TrainingDataPersistence.LoadFromDisk(structuredArgs.AsString("Filename")).Samples;

                Console.WriteLine(
                    $"Overall number of samples is {samples.Count}. Now showing up to {maxCount} samples...");
                var ctr = 0;

                foreach (var sample in samples)
                {
                    Console.WriteLine(sample);
                    if (ctr++ >= maxCount)
                    {
                        break;
                    }
                }
            }

            void Optimize()
            {
                var methodName = structuredArgs.AsStringOrDefault("Method", "LocalSolver");
                var problem    = ProblemInstanceGenerator.Generate(23);
                BaseProductionRatePredictor predictor = null;

                //predictor = new KerasNeuralProductionRatePredictor(ModelPersistence.LoadFromDisk("model.hdf5"));
                predictor = new OnnxNeuralProductionRatePredictor("converted.onnx");
                //predictor = new MlProductionRatePredictor("model.zip");
                MilkrunBufferAllocationSolution sol = null;

                switch (methodName)
                {
                case "SimulatedAnnealing":
                    sol = SimAnnealOptimizer.Solve(problem, predictor, 1000, 1.0f);
                    break;

                case "LocalSolver":
                    //var evaluator = new SimulationEvaluator(problem);
                    var evaluator = new PredictorBasedEvaluator(problem, predictor);
                    sol = LocalSolverOptimizer.Solve(problem, evaluator);
                    break;
                }

                Console.WriteLine("Solution of optimization = {0}", sol);
                Console.WriteLine("Production rate from predictor = {0}", predictor.Predict(sol.ToSample(problem.ProcessingRates)));
                Console.WriteLine("Production rate from simulation = {0}", SimulationRunner.ProductionRateForConfiguration(sol.ToFlowlineConfiguration(problem.ProcessingRates)));
                Console.WriteLine("Minimum production rate = {0}", problem.MinProductionRate);
            }

            void TrainForest()
            {
                var       td          = TrainingDataPersistence.LoadFromDisk(structuredArgs.AsString("Filename"));
                var       tvd         = MlUtils.Split(td, 0.999f, true);
                MLContext context     = new MLContext(23);
                var       transformer = ModelTrainer.TrainModelWithData(context, tvd.Training, tvd.Validation, out var schema);

                context.Model.Save(transformer, schema, "model.zip");
            }

            void AutoMl()
            {
                var       td      = TrainingDataPersistence.LoadFromDisk(structuredArgs.AsString("Filename"));
                var       tvd     = MlUtils.Split(td, 1.0f, true);
                MLContext context = new MLContext(23);

                ModelSearch.AutoMlOnDataset(context, tvd.Training, tvd.Validation);
            }

            void DumpPredictionErrors()
            {
                var td  = TrainingDataPersistence.LoadFromDisk(structuredArgs.AsString("Filename"));
                var tvd = MlUtils.Split(td, 0.5f, false);
                var dnn = new OnnxNeuralProductionRatePredictor("converted.onnx");

                PredictionSample Predict(Sample sample)
                {
                    var   predictedRate = dnn.Predict(sample);
                    float deviation     = predictedRate - sample.ProductionRate;

                    return(new PredictionSample(sample, deviation));
                }

                var psamples = tvd.Validation.Samples.Take(100).Select(Predict).ToList();

                File.WriteAllText("deviations.csv", CsvSerializer.SerializeToCsv(psamples));
            }

            void TestExhaustiveGenerator()
            {
                int numMachines = 6;
                int numBuffers  = numMachines - 1;
                var features    = new List <FeatureDescription> {
                    new FeatureDescription {
                        IsDiscrete = false,
                        LowerBound = 30,
                        UpperBound = 120,
                        Name       = DefaultFeatures.MilkRunCycleLength.ToString()
                    }
                };

                features.AddRange(Enumerable.Range(0, numMachines).Select(i =>
                                                                          new FeatureDescription {
                    IsDiscrete = false,
                    LowerBound = 0.8,
                    UpperBound = 1.2,
                    Name       = DefaultFeatures.ProcessingRate + $"{i+1}"
                }));
                // One continuous material-ratio feature per machine, bounded to [0.5, 1.5].
                features.AddRange(Enumerable.Range(0, numMachines).Select(i =>
                                                                          new FeatureDescription {
                    IsDiscrete = false,
                    LowerBound = 0.5,
                    UpperBound = 1.5,
                    Name       = DefaultFeatures.MaterialRatio + $"{i+1}"
                }));
                // One discrete buffer-size feature per buffer, bounded to [0, 80].
                features.AddRange(Enumerable.Range(0, numBuffers).Select(i =>
                                                                         new FeatureDescription {
                    IsDiscrete = true,
                    LowerBound = 0,
                    UpperBound = 80,
                    Name       = DefaultFeatures.BufferSize + $"{i+1}"
                }));

                // Big ortho experiment
                //int targetSampleCount = 2000000;
                //int subCubeSplitFactor = 2;

                // Small latin only experiment (split factor 1 presumably means a single
                // sub-cube, i.e. plain Latin hypercube sampling — confirm in PickSamples).
                int targetSampleCount  = 2000000;
                int subCubeSplitFactor = 1;

                // Round the requested sample count up to a multiple of the number of
                // sub-cubes so each sub-cube can receive an equal share of samples.
                int numCubes  = Utils.Pow(subCubeSplitFactor, features.Count);
                int numValues = (int)Math.Ceiling(targetSampleCount / (double)numCubes) * numCubes;
                var samples   = OrthoLatinHyperCube.PickSamples(features.ToArray(), numValues, subCubeSplitFactor);
                // CSV output: one header row of column names, then one row per sample.
                var lines     = new List <string> {
                    string.Join(",", samples.First().ColumnNames())
                };

                lines.AddRange(samples.Select(sample => string.Join(",", sample.ToFloats())));
                File.WriteAllText("ortholatinhypercube.csv", string.Join("\n", lines));

                // Sanity report: count distinct sampled values per dimension.
                // NOTE(review): buffer stats are only printed while i < numBuffers,
                // which assumes numBuffers <= numMachines — confirm upstream.
                Console.WriteLine("\nDistinct values");
                for (int i = 0; i < numMachines; i++)
                {
                    if (i < numBuffers)
                    {
                        Console.WriteLine($"Distinct buffer {i+1} sizes = {samples.Select(s => s.BufferSizes[i]).Distinct().Count()}");
                    }
                    Console.WriteLine($"Distinct order up to levels {i+1} = {samples.Select(s => s.OrderUpToLevels[i]).Distinct().Count()}");
                    Console.WriteLine($"Distinct processing rates {i+1} = {samples.Select(s => s.ProcessingRates[i]).Distinct().Count()}");
                }
                Console.WriteLine($"Distinct milk run cycle lengths = {samples.Select(s => s.MilkrunCycleLength).Distinct().Count()}");
            }

            // Builds one flow-line instance from the "Seed" argument and writes it
            // as JSON to the path given by "Filename" (default: instance.json).
            void GenerateInstance()
            {
                var seedValue  = structuredArgs.AsInt("Seed");
                var targetFile = structuredArgs.AsStringOrDefault("Filename", "instance.json");
                var instance   = InstanceGenerator.Generate(seedValue);

                Utils.SaveObjectAsJson(instance, targetFile);
            }

            // Every CLI action this tool exposes; each entry is one of the local
            // functions defined above.
            var availableActions = new List <Action> {
                BatchSimulation,
                TrainNetwork,
                TrainNetworkOrthogonalSampling,
                JobGeneration,
                MergeResults,
                PrintData,
                Optimize,
                TrainForest,
                AutoMl,
                BatchSimulationOptimizationBased,
                DumpPredictionErrors,
                TestExhaustiveGenerator,
                GenerateInstance
            };

            // Map each action's name (as derived by Utils.NameOfLocalActionFunction —
            // presumably the local function's identifier; verify) to its delegate so
            // the command line can select an action by name.
            var actionMappings =
                availableActions.ToDictionary(action => Utils.NameOfLocalActionFunction("ParseArgs", action),
                                              action => action);

            if (args.Length >= 1)
            {
                // Dispatch to the requested action and exit if it is known.
                var action = structuredArgs.GetAction();
                if (actionMappings.ContainsKey(action))
                {
                    actionMappings[action]();
                    return;
                }
            }

            // No arguments or unknown action: list the available action names.
            ShowUsage(actionMappings);
        }
예제 #23
0
        /// <summary>
        /// Trains a neural network on the data set at <paramref name="trainSetLocation"/>
        /// using the propagation algorithm selected by <paramref name="trainer"/>, writes
        /// the per-epoch error to <paramref name="trainingHistoryLocation"/>, and
        /// serializes the trained network to <paramref name="path2SaveModel"/>.
        /// </summary>
        /// <param name="trainSetLocation">Path of the training data set.</param>
        /// <param name="path2SaveModel">Path the trained model is written to (overwritten if present).</param>
        /// <param name="trainingHistoryLocation">Path of the per-epoch error log file.</param>
        /// <param name="numberOfEpochs">Upper bound on the number of training iterations.</param>
        /// <param name="errorThreshold">Training stops early once the error drops to this value or below.</param>
        /// <param name="trainer">Which training algorithm to use.</param>
        /// <returns>The trained network.</returns>
        /// <exception cref="ArgumentOutOfRangeException">If <paramref name="trainer"/> is not a known algorithm.</exception>
        public static BasicNetwork TrainNetwork(string trainSetLocation, string path2SaveModel, string trainingHistoryLocation, int numberOfEpochs, double errorThreshold, NetworkTrainer trainer)
        {
            IMLDataSet trainingDataSet = null;
            List <NeuralLayerDescriptor> descriptor = NetworkTrainingInitializer(trainSetLocation, out trainingDataSet);
            var    network = NetworkCreator.CreateNetwork(descriptor);
            ITrain train;

            // Select the training algorithm. The original switch had no default, so an
            // unrecognized enum value left `train` null and crashed later with a
            // NullReferenceException at train.Iteration(); fail fast instead.
            switch (trainer)
            {
            case NetworkTrainer.BackProp:
                train = new Backpropagation(network, trainingDataSet);
                break;

            case NetworkTrainer.ResilientBackProp:
                train = new ResilientPropagation(network, trainingDataSet);
                break;

            case NetworkTrainer.Quick:
                train = new QuickPropagation(network, trainingDataSet);
                break;

            case NetworkTrainer.ScaledConjugateGradient:
                train = new ScaledConjugateGradient(network, trainingDataSet);
                break;

            default:
                throw new ArgumentOutOfRangeException(nameof(trainer), trainer, "Unsupported training algorithm.");
            }

            int epoch = 0;

            // Iterate until the epoch budget is spent or the error threshold is reached,
            // logging each epoch's error so training can be inspected afterwards.
            using (System.IO.StreamWriter file =
                       new System.IO.StreamWriter(trainingHistoryLocation))
            {
                do
                {
                    train.Iteration();
                    Console.WriteLine("Epoch #" + epoch + " Error:" + train.Error);
                    file.WriteLine(train.Error);
                    epoch++;
                } while ((epoch < numberOfEpochs) && (train.Error > errorThreshold));
            }

            // SECURITY NOTE(review): BinaryFormatter is obsolete and unsafe to
            // deserialize (removed in .NET 9). Kept for compatibility with existing
            // model files, but migrate to a supported serializer when possible.
            var serializer = new BinaryFormatter();

            using (var s = new FileStream(path2SaveModel, FileMode.Create))
            {
                serializer.Serialize(s, network);
            }
            return(network);
        }
예제 #24
0
 // Presumably a smoke test: building a NetworkTrainer around a freshly
 // constructed 1-2-1 sigmoid network must complete without throwing.
 public void CreateTrainer()
 {
     var net        = NNetwork.SigmoidNetwork(new int[] { 1, 2, 1 });
     var newTrainer = new NetworkTrainer(net);
 }