Example #1
        public void TestIrisDataset()
        {
            int trainingSamples = IrisDataset.input.Length;

            Backprop bprop = new Backprop(1e-2, abstol: 1e-4, reltol: 1e-7, adjustThreshold: 1e-20);
            bprop.UnknownCaseFaced += AddRule<GaussianRule2>;

            BatchBackprop bbprop = new BatchBackprop(1e-2, abstol: 1e-4, reltol: 1e-7, adjustThreshold: 1e-20);
            bbprop.UnknownCaseFaced += AddRule<GaussianRule2>;

            QProp qprop = new QProp(abstol: 1e-4, reltol: 1e-7, adjustThreshold: 1e-20, InitialLearningRate: 1e-4);
            qprop.UnknownCaseFaced += AddRule<GaussianRule2>;

            StochasticBatch sprop = new StochasticBatch(40, 1e-2);
            sprop.UnknownCaseFaced += AddRule<GaussianRule2>;

            StochasticQprop sqprop = new StochasticQprop(40);
            sqprop.UnknownCaseFaced += AddRule<GaussianRule2>;

            double[][] x;
            double[][] y;
            double[][] tx;
            double[][] ty;
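            // Assumed behavior of SampleData (helper not shown): take 120 samples for training (x, y)
            // and keep the remaining samples as the held-out test set (tx, ty).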
            SampleData(IrisDataset.input, IrisDataset.output, 120, out x, out y, out tx, out ty);

            subtestIris(x, y, tx, ty, bprop);
            subtestIris(x, y, tx, ty, bbprop);
            subtestIris(x, y, tx, ty, qprop);
            subtestIris(x, y, tx, ty, sprop);
            subtestIris(x, y, tx, ty, sqprop);
        }
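
The subtestIris helper called above is not shown in the listing. The following is a minimal sketch of what it could look like, assuming the trainers share a common interface (written here as ITraining, which is a guess at the actual type), that IrisDataset.output is a one-hot class encoding, and that System.Linq is in scope; the rule count and epoch count are arbitrary choices.

        // Hypothetical helper, not part of the library or the original test:
        // build a FIS with the supplied trainer and report classification accuracy on the test set.
        private static void subtestIris(double[][] x, double[][] y, double[][] tx, double[][] ty, ITraining trainer)
        {
            var extractor = new KMEANSExtractorIO(10); // arbitrary rule count
            ANFIS fis = ANFISBuilder<GaussianRule2>.Build(x, y, extractor, trainer, 100);

            int correct = 0;
            for (int i = 0; i < tx.Length; i++)
            {
                double[] predicted = fis.Inference(tx[i]).ToArray();

                // compare the strongest predicted class against the strongest expected class
                if (Array.IndexOf(predicted, predicted.Max()) == Array.IndexOf(ty[i], ty[i].Max()))
                {
                    correct++;
                }
            }

            Console.WriteLine($"Iris accuracy with {trainer.GetType().Name}: {(double)correct / tx.Length:P1}");
        }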
Example #2
        public void ANFIS_OutputFromDataSet()
        {
            var sampleSize = RobotArmDataSet.Input.Length - 1;
            //StochasticBatch sprop = new StochasticBatch(sampleSize, 1e-5);
            //sprop.UnknownCaseFaced += AddRule<GaussianRule2>;
            //var sprop = new Backprop(1e-1);
            var sprop     = new StochasticQprop(sampleSize);
            var extractor = new KMEANSExtractorIO(25);

            //ANFIS fis = ANFISBuilder<GaussianRule2>.Build(RobotArmDataSet.Input, RobotArmDataSet.OutputTheta1, extractor, sprop, 150);
            ANFIS fis = ANFISBuilder<GaussianRule>.Build(RobotArmDataSet.Input, RobotArmDataSet.OutputTheta1, extractor, sprop, 150);

            var output1 = fis.Inference(new[] { 1.10413546487088, 2.81104319371924 }).FirstOrDefault(); // expected ≈ 1.1
            var output2 = fis.Inference(new[] { 2.31665592712393, 1.9375717475909 }).FirstOrDefault();  // expected ≈ 0.6
            var output3 = fis.Inference(new[] { 2.88944142930409, 16.7526454098038 }).FirstOrDefault(); // expected ≈ 1.4
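
            // Hypothetical follow-up, not in the original test: assert the inferred values against
            // the expected outputs noted in the comments above. The 0.1 tolerance is an arbitrary
            // choice for this sketch, and an MSTest/NUnit-style Assert is assumed to be available.
            Assert.IsTrue(Math.Abs(output1 - 1.1) < 0.1);
            Assert.IsTrue(Math.Abs(output2 - 0.6) < 0.1);
            Assert.IsTrue(Math.Abs(output3 - 1.4) < 0.1);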
        }
Example #3
        /// <summary>
        /// Trains the Theta1 and Theta2 ANFIS networks in parallel on the current data set.
        /// </summary>
        /// <param name="ruleNumber">Number of fuzzy rules (k-means clusters) to extract.</param>
        /// <param name="maxIterations">Maximum number of training iterations per network.</param>
        /// <param name="useAnalicitalOutcomeForTraining">If true, train against analytically computed joint angles instead of the precomputed angle grid.</param>
        /// <returns>A task that completes with true once both networks have been trained.</returns>
        public Task<bool> TrainANFIS(int ruleNumber, int maxIterations, bool useAnalicitalOutcomeForTraining = false)
        {
            return(Task.Run(() => {
                if (!IsDataSetCalculated)
                {
                    throw new ApplicationException("DataSet is not calculated or provided.");
                }

                var sampleSize = Positions.Count() - 1;
                var dynamicObj = useAnalicitalOutcomeForTraining
                    ? Positions.Select(x => new { Point = x, KinematicOutCome = CalculateArmJoint(x).GetAwaiter().GetResult().FirstOrDefault() })
                    : null;

                var input = useAnalicitalOutcomeForTraining
                    ? dynamicObj.Select(x => x.Point).ConvertToANFISParameter()
                    : Positions.ConvertToANFISParameter();
                var theta1ANFIS = Task.Run(() => {
                    var sPropTheta1 = new StochasticQprop(sampleSize);
                    var extractorForTheta1 = new KMEANSExtractorIO(ruleNumber);
                    var expectedOutcome = useAnalicitalOutcomeForTraining
                        ? dynamicObj.Select(x => new[] { x.KinematicOutCome.Theta1.ConvertRadiansToDegrees() }).ToArray()
                        : AnglesGrid.First().ConvertToANFISParameter();
                    Theta1ANFIS = ANFISBuilder<GaussianRule>.Build(input, expectedOutcome, extractorForTheta1, sPropTheta1, maxIterations);
                });
                var theta2ANFIS = Task.Run(() => {
                    var sPropTheta2 = new StochasticQprop(sampleSize);
                    var extractorForTheta2 = new KMEANSExtractorIO(ruleNumber);
                    var expectedOutcome2 = useAnalicitalOutcomeForTraining
                        ? dynamicObj.Select(x => new[] { x.KinematicOutCome.Theta2.ConvertRadiansToDegrees() }).ToArray()
                        : AnglesGrid.Last().ConvertToANFISParameter();

                    Theta2ANFIS = ANFISBuilder<GaussianRule>.Build(input, expectedOutcome2, extractorForTheta2, sPropTheta2, maxIterations);
                });

                Task.WaitAll(theta1ANFIS, theta2ANFIS);
                IsANFISTrained = true;
                return true;
            }));
        }
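
A sketch of how TrainANFIS might be used, assuming it is called on an instance whose data set has already been calculated; the target position, rule count, and iteration count are illustrative values, and Inference is assumed to accept the same input layout that ConvertToANFISParameter produces for training (System.Linq in scope for FirstOrDefault).

        // Hypothetical usage, not part of the original class: train both networks,
        // then query the joint angles for a single target position.
        public async Task DemoTrainAndInferAsync()
        {
            await TrainANFIS(ruleNumber: 25, maxIterations: 150);

            double[] targetPosition = { 1.5, 0.7 }; // illustrative position, same layout as the training input

            double theta1 = Theta1ANFIS.Inference(targetPosition).FirstOrDefault();
            double theta2 = Theta2ANFIS.Inference(targetPosition).FirstOrDefault();

            Console.WriteLine($"theta1 = {theta1:F2}, theta2 = {theta2:F2}");
        }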
Example #4
        public void TestOptimization1()
        {
            Backprop        bprop  = new Backprop(1e-2);
            BatchBackprop   bbprop = new BatchBackprop(1e-2);
            QProp           qprop  = new QProp();
            StochasticBatch sprop  = new StochasticBatch(100, 1e-2);
            StochasticQprop sqprop = new StochasticQprop(100);

            int trainingSamples = 1000;

            double[][] x  = new double[trainingSamples][];
            double[][] y  = new double[trainingSamples][];
            double[][] tx = new double[trainingSamples][];
            double[][] ty = new double[trainingSamples][];

            Random rnd = new Random();
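            // Every sample, training and test alike, maps to the constant output 1,
            // so each optimizer only has to learn a constant surface over random 2D inputs.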

            for (int i = 0; i < trainingSamples; i++)
            {
                double valx = 0.5 - rnd.NextDouble();
                double valy = 0.5 - rnd.NextDouble();

                x[i] = new double[] { valx, valy };
                y[i] = new double[] { 1 };


                valx = 0.5 - rnd.NextDouble();
                valy = 0.5 - rnd.NextDouble();

                tx[i] = new double[] { valx, valy };
                ty[i] = new double[] { 1 };
            }

            subTestOptimization1(bprop, x, y, tx, ty);
            subTestOptimization1(bbprop, x, y, tx, ty);
            subTestOptimization1(qprop, x, y, tx, ty);
            subTestOptimization1(sprop, x, y, tx, ty);
            subTestOptimization1(sqprop, x, y, tx, ty);
        }
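
The subTestOptimization1 helper is likewise omitted from the listing. A minimal sketch under the same assumptions as the subtestIris sketch above (a common ITraining interface, System.Linq in scope, arbitrary extractor and epoch settings):

        // Hypothetical helper, not part of the library: train on (x, y) and report
        // the mean absolute error on the held-out pairs (tx, ty).
        private static void subTestOptimization1(ITraining trainer, double[][] x, double[][] y, double[][] tx, double[][] ty)
        {
            var extractor = new KMEANSExtractorIO(5); // arbitrary rule count
            ANFIS fis = ANFISBuilder<GaussianRule2>.Build(x, y, extractor, trainer, 100);

            // Every target is 1, so the error should shrink toward 0 as training converges.
            double meanAbsError = tx.Select((input, i) => Math.Abs(fis.Inference(input).First() - ty[i][0])).Average();
            Console.WriteLine($"{trainer.GetType().Name}: mean absolute test error = {meanAbsError:E2}");
        }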
Example #5
        public void TestOptimization2()
        {
            Backprop        bprop  = new Backprop(1e-2);
            BatchBackprop   bbprop = new BatchBackprop(1e-2);
            QProp           qprop  = new QProp();
            StochasticBatch sprop  = new StochasticBatch(100, 1e-2);
            StochasticQprop sqprop = new StochasticQprop(100);

            int trainingSamples = 100;

            double[][] x  = new double[trainingSamples][];
            double[][] y  = new double[trainingSamples][];
            double[][] tx = new double[trainingSamples][];
            double[][] ty = new double[trainingSamples][];

            Random rnd = new Random();
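            // Samples alternate between two clusters centred near +1 and -1 on the single input axis;
            // the target is a two-element one-hot vector encoding which cluster the sample belongs to.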

            for (int i = 0; i < trainingSamples; i++)
            {
                bool   isRight = i % 2 == 0;
                double valx    = (isRight ? 1 : -1) + (0.5 - rnd.NextDouble());

                x[i] = new double[] { valx };
                y[i] = new double[] { isRight ? 1 : 0, isRight ? 0 : 1 };

                valx = (isRight ? 1 : -1) + (0.5 - rnd.NextDouble());

                tx[i] = new double[] { valx };
                ty[i] = new double[] { isRight ? 1 : 0, isRight ? 0 : 1 };
            }

            subTestOptimization2(bprop, x, y, tx, ty);
            subTestOptimization2(bbprop, x, y, tx, ty);
            subTestOptimization2(qprop, x, y, tx, ty);
            subTestOptimization2(sprop, x, y, tx, ty);
            subTestOptimization2(sqprop, x, y, tx, ty);
        }
Example #6
        public void TestLogisticMap()
        {
            int trainingSamples = 2000;

            double[][] x  = new double[trainingSamples][];
            double[][] y  = new double[trainingSamples][];
            double[][] tx = new double[trainingSamples][];
            double[][] ty = new double[trainingSamples][];

            double px = 0.1;
            double r  = 3.8; // chaotic regime; the onset of chaos is near r ≈ 3.56995
            double lx = r * px * (1 - px);
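            // Iterate the logistic map lx_{n+1} = r * lx_n * (1 - lx_n): each input row holds two
            // consecutive map values (px, lx) and the target is the next value in the sequence.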

            for (int i = 0; i < trainingSamples; i++)
            {
                x[i] = new double[] { px, lx };
                px   = lx;
                lx   = r * lx * (1 - lx);
                y[i] = new double[] { lx };
            }

            for (int i = 0; i < trainingSamples; i++)
            {
                tx[i] = new double[] { px, lx };
                px    = lx;
                lx    = r * lx * (1 - lx);
                ty[i] = new double[] { lx };
            }

            Backprop bprop = new Backprop(1e-2);
            bprop.UnknownCaseFaced += AddRule<GaussianRule2>;

            BatchBackprop bbprop = new BatchBackprop(1e-2);
            bbprop.UnknownCaseFaced += AddRule<GaussianRule2>;

            QProp qprop = new QProp();
            qprop.UnknownCaseFaced += AddRule<GaussianRule2>;

            StochasticBatch sprop = new StochasticBatch(500, 1e-2);
            sprop.UnknownCaseFaced += AddRule<GaussianRule2>;

            StochasticQprop sqprop = new StochasticQprop(500);
            sqprop.UnknownCaseFaced += AddRule<GaussianRule2>;

            subtestLogisticsMap<LinearRule>(x, y, tx, ty, bprop);
            subtestLogisticsMap<LinearRule>(x, y, tx, ty, bbprop);
            subtestLogisticsMap<LinearRule>(x, y, tx, ty, qprop);
            subtestLogisticsMap<LinearRule>(x, y, tx, ty, sprop);
            subtestLogisticsMap<LinearRule>(x, y, tx, ty, sqprop);

            bprop = new Backprop(1e-2);
            bprop.UnknownCaseFaced += AddRule<GaussianRule2>;
            bbprop = new BatchBackprop(1e-2);
            bbprop.UnknownCaseFaced += AddRule<GaussianRule2>;
            qprop = new QProp();
            qprop.UnknownCaseFaced += AddRule<GaussianRule2>;
            sprop = new StochasticBatch(500, 1e-2);
            sprop.UnknownCaseFaced += AddRule<GaussianRule2>;
            sqprop = new StochasticQprop(500);
            sqprop.UnknownCaseFaced += AddRule<GaussianRule2>;

            subtestLogisticsMap<GaussianRule>(x, y, tx, ty, bprop);
            subtestLogisticsMap<GaussianRule>(x, y, tx, ty, bbprop);
            subtestLogisticsMap<GaussianRule>(x, y, tx, ty, qprop);
            subtestLogisticsMap<GaussianRule>(x, y, tx, ty, sprop);
            subtestLogisticsMap<GaussianRule>(x, y, tx, ty, sqprop);
        }