Example #1
        //Add instances if the number of selected instances is less than the user-defined minimum
        public ObjectInstanceSelection AddInstances(ObjectInstanceSelection globalBestFlower, int Min)
        {
            int countSelected = globalBestFlower.Attribute_Values.Count(q => q == 1); //count the total number of selected instances
            int diff, c = 0, d = 0;

            if (countSelected < Min)
            {
                //if fewer than Min instances are selected, add (Min - countSelected) more
                diff = Min - countSelected;
                while (c < diff)
                {
                    if (globalBestFlower.Attribute_Values[d] == 1) //skip positions that are already selected
                    {
                        d++;
                        continue;
                    }
                    else //add instances to positions that are not selected; i.e. where instance mask is equal to 0
                    {
                        globalBestFlower.Attribute_Values[d] = 1;
                        c++; d++;
                    }
                }
            }

            diff = globalBestFlower.Attribute_Values.Count(a => a == 1); //recount the selected instances (value is not used further)

            return(globalBestFlower);
        }
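The same padding rule, sketched in isolation on a plain int[] mask (PadMaskToMinimum is a hypothetical helper name; it assumes System.Linq is available, as in the examples above):

        //Sketch: pad a binary instance mask so that at least 'min' positions are selected (set to 1)
        public int[] PadMaskToMinimum(int[] mask, int min)
        {
            int selected = mask.Count(v => v == 1); //count the currently selected positions
            for (int d = 0; d < mask.Length && selected < min; d++)
            {
                if (mask[d] == 0) //only touch positions that are not yet selected
                {
                    mask[d] = 1;
                    selected++;
                }
            }
            return mask;
        }

For example, a mask of { 1, 0, 0, 1, 0, 0 } padded to a minimum of 4 becomes { 1, 1, 1, 1, 0, 0 }.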
Example #2
        //evaluate new bat solution, update better solution (if found), and get global best bat
        public ObjectInstanceSelection EvaluateSolution(double[] batFitnessVal, double[] newBatFitnessVal, double globalBest, List <ObjectInstanceSelection> bats, List <ObjectInstanceSelection> newBats, ObjectInstanceSelection globalBestBat, double loudness)
        {
            double newBest = new double();
            int    maxIndex;
            Random r = new Random();

            //evaluate solution and update, if better solution is found
            for (int i = 0; i < batFitnessVal.Count(); i++)
            {
                if (newBats[i].Fitness > bats[i].Fitness && r.NextDouble() < loudness)
                {
                    bats[i]          = new ObjectInstanceSelection(newBats[i].Attribute_Values, newBats[i].Attribute_Values_Continuous, newBats[i].Frequency, newBats[i].Velocity, newBats[i].Pointers, newBats[i].Fitness); //create a clone of the bat
                    batFitnessVal[i] = newBats[i].Fitness;
                    //bats[i] = newBats[i]; //update solution
                }
            }

            //get global best bat
            newBest = newBatFitnessVal.Max(); //get the bat with the highest fitness
            if (newBest > globalBest)
            {
                globalBest    = newBest;
                maxIndex      = Array.IndexOf(newBatFitnessVal, newBest); //select the index of the global best
                globalBestBat = new ObjectInstanceSelection(newBats[maxIndex].Attribute_Values, newBats[maxIndex].Attribute_Values_Continuous, newBats[maxIndex].Frequency, newBats[maxIndex].Velocity, newBats[maxIndex].Pointers, newBats[maxIndex].Fitness); //clone the best new bat as the global best
                //globalBestBat = newBats[maxIndex]; //select the global best flower
            }

            return(globalBestBat);
        }
Example #3
        //evaluate new flower solution, update better solution (if found), and get global best flower
        public ObjectInstanceSelection EvaluateSolution(double[] flowerFitnessVal, double[] newflowerFitnessVal, double globalBest, List <ObjectInstanceSelection> flowers, List <ObjectInstanceSelection> newFlowers, ObjectInstanceSelection globalBestFlower)
        {
            double newBest = new double();
            int    maxIndex;

            //evaluate solution and update, if better solution is found
            for (int i = 0; i < flowerFitnessVal.Count(); i++)
            {
                if (newFlowers[i].Fitness > flowers[i].Fitness)
                {
                    flowers[i]          = new ObjectInstanceSelection(newFlowers[i].Attribute_Values, newFlowers[i].Attribute_Values_Continuous, newFlowers[i].Pointers, newFlowers[i].Fitness); //create a clone of flowers
                    flowerFitnessVal[i] = newFlowers[i].Fitness;
                }
            }

            //get global best flower
            newBest = newflowerFitnessVal.Max(); //get the flower with the highest fitness
            if (newBest > globalBest)
            {
                globalBest       = newBest;
                maxIndex         = Array.IndexOf(newflowerFitnessVal, newBest);                                                                                                                                       //select the index for the global best
                globalBestFlower = new ObjectInstanceSelection(newFlowers[maxIndex].Attribute_Values, newFlowers[maxIndex].Attribute_Values_Continuous, newFlowers[maxIndex].Pointers, newFlowers[maxIndex].Fitness); //clone the best new flower as the global best
            }

            return(globalBestFlower);
        }
        /// <summary>
        /// generating the initial locations of n spiders
        /// </summary>
        public List <ObjectInstanceSelection> InitializeBinarySpider(int nSpiders, int subsetSize, int probSize, Problem prob)
        {
            Random     rnd  = new Random();
            List <int> rNum = Training.GetRandomNumbers(probSize, probSize); //generate N random numbers
            FireflyInstanceSelection fpa = new FireflyInstanceSelection();

            List <ObjectInstanceSelection> attr_values = new List <ObjectInstanceSelection>();
            int cnt1 = 0, cnt2 = 0, cnt3 = 0;

            //create an array of size n for x and y
            int[]    xn = new int[subsetSize];        //instance mask
            double[] xn_Con = new double[subsetSize]; //instance mask continuous
            double   freq = new double();             //frequency (unused here; carried over from the bat initializer)

            double[] vel = new double[subsetSize];    //velocity (unused here; carried over from the bat initializer)
            int[]    pointers = new int[subsetSize];  //array of pointers to the actual instances represented in the instance mask
            double   spiderPosition = 0;
            int      k = 0;
            int      bound = 100;

            for (int i = 0; i < nSpiders; i++)
            {
                xn       = new int[subsetSize];
                xn_Con   = new double[subsetSize];
                pointers = new int[subsetSize];
                cnt1     = 0; cnt2 = 0; cnt3 = 0;
                for (int j = 0; j < prob.Count; j++)
                {
                    if (cnt1 < (0.7 * subsetSize) && prob.Y[rNum[j]] == -1) //fill 70% of the subset with instances from the -1 class
                    {
                        //xn_Con[cnt3] = rnd.NextDouble();
                        //xn[cnt3] = fi.Binarize(xn_Con[cnt3], rnd.NextDouble());
                        xn[cnt3]       = rnd.Next(0, 2);                       //initialize each spider position.
                        pointers[cnt3] = rNum[j];
                        spiderPosition = rnd.NextDouble() * 2 * bound - bound; //generate position of spider
                        k++; cnt1++; cnt3++;
                    }
                    else if (cnt2 < (0.3 * subsetSize) && prob.Y[rNum[j]] == 1)
                    {
                        //xn_Con[cnt3] = rnd.NextDouble();
                        //xn[cnt3] = fi.Binarize(xn_Con[cnt3], rnd.NextDouble());
                        xn[cnt3]       = rnd.Next(0, 2);                       //initialize each spider position.
                        pointers[cnt3] = rNum[j];
                        spiderPosition = rnd.NextDouble() * 2 * bound - bound; //generate position of spider
                        k++; cnt2++; cnt3++;
                    }
                    if (cnt3 >= subsetSize)
                    {
                        break;
                    }
                }

                ObjectInstanceSelection OI = new ObjectInstanceSelection(xn, xn_Con, pointers, 0.0, spiderPosition);
                attr_values.Add(OI);
            }

            return(attr_values);
        }
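The 70/30 split used by the initializers above amounts to a stratified sample of instance pointers. A minimal sketch of that step on its own (SelectStratifiedPointers is a hypothetical helper; labels of -1 and +1 as in prob.Y, with shuffledIndices playing the role of rNum):

        //Sketch: pick 'subsetSize' pointers, roughly 70% from the -1 class and 30% from the +1 class
        public int[] SelectStratifiedPointers(double[] labels, List <int> shuffledIndices, int subsetSize)
        {
            int[] pointers = new int[subsetSize];
            int quotaNeg = (int)(0.7 * subsetSize); //quota for the -1 class
            int quotaPos = subsetSize - quotaNeg;   //quota for the +1 class
            int cntNeg = 0, cntPos = 0, filled = 0;

            foreach (int idx in shuffledIndices)
            {
                if (filled >= subsetSize)
                {
                    break;
                }
                if (labels[idx] == -1 && cntNeg < quotaNeg) { pointers[filled++] = idx; cntNeg++; }
                else if (labels[idx] == 1 && cntPos < quotaPos) { pointers[filled++] = idx; cntPos++; }
            }
            return pointers;
        }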
Example #5
        /// <summary>
        /// generating the initial locations of n flower
        /// </summary>
        public List <ObjectInstanceSelection> InitializeBinaryFlower(int nFlower, int subsetSize, int probSize, Problem prob)
        {
            Random     rnd  = new Random();
            List <int> rNum = Training.GetRandomNumbers(probSize, probSize); //generate N random numbers
            FireflyInstanceSelection fpa = new FireflyInstanceSelection();

            List <ObjectInstanceSelection> attr_values = new List <ObjectInstanceSelection>();
            int cnt1 = 0, cnt2 = 0, cnt3 = 0;

            //create an array of size n for x and y
            int[]    xn = new int[subsetSize];        //instance mask
            double[] xn_Con = new double[subsetSize]; //instance mask continuous
            int[]    pointers = new int[subsetSize];  //array of pointers to the actual instances represented in the instance mask
            int      k = 0;

            for (int i = 0; i < nFlower; i++)
            {
                xn       = new int[subsetSize];
                xn_Con   = new double[subsetSize];
                pointers = new int[subsetSize];
                cnt1     = 0; cnt2 = 0; cnt3 = 0;
                for (int j = 0; j < prob.Count; j++)
                {
                    if (cnt1 < (0.7 * subsetSize) && prob.Y[rNum[j]] == -1) //fill 70% of the subset with instances from the -1 class
                    {
                        //xn[cnt3] = rnd.NextDouble() <= 0.5 ? 0 : 1;
                        xn[cnt3] = rnd.Next(0, 2);
                        //xn_Con[cnt3] = rnd.NextDouble();
                        //xn[cnt3] = fi.Binarize(xn_Con[cnt3], rnd.NextDouble());
                        pointers[cnt3] = rNum[j];
                        k++; cnt1++; cnt3++;
                    }
                    else if (cnt2 < (0.3 * subsetSize) && prob.Y[rNum[j]] == 1)
                    {
                        //xn[cnt3] = rnd.NextDouble() <= 0.5 ? 0 : 1;
                        xn[cnt3] = rnd.Next(0, 2);
                        //xn_Con[cnt3] = rnd.NextDouble();
                        //xn[cnt3] = fi.Binarize(xn_Con[cnt3], rnd.NextDouble());
                        pointers[cnt3] = rNum[j];
                        k++; cnt2++; cnt3++;
                    }
                    if (cnt3 >= subsetSize)
                    {
                        break;
                    }
                }

                ObjectInstanceSelection OI = new ObjectInstanceSelection(xn, xn_Con, pointers, 0.0);
                attr_values.Add(OI);
            }

            return(attr_values);
        }
Example #6
        /// <summary>
        /// generating the initial locations of n bats
        /// </summary>
        public List <ObjectInstanceSelection> InitializeBinaryBat(int nBats, int subsetSize, int probSize, Problem prob)
        {
            Random     rnd  = new Random();
            List <int> rNum = Training.GetRandomNumbers(probSize, probSize); //generate N random numbers
            FireflyInstanceSelection fpa = new FireflyInstanceSelection();

            List <ObjectInstanceSelection> attr_values = new List <ObjectInstanceSelection>();
            int cnt1 = 0, cnt2 = 0, cnt3 = 0;

            //create an array of size n for x and y
            int[]    xn = new int[subsetSize];        //instance mask
            double[] xn_Con = new double[subsetSize]; //instance mask continuous
            double   freq = new double();             //initialize the frequency of all the bats to zero

            double[] vel = new double[subsetSize];    //initialize the velocity of all the bats to zero
            int[]    pointers = new int[subsetSize];  //array of pointers to the actual instances represented in the instance mask
            int      k = 0;

            for (int i = 0; i < nBats; i++)
            {
                xn       = new int[subsetSize];
                xn_Con   = new double[subsetSize];
                pointers = new int[subsetSize];
                cnt1     = 0; cnt2 = 0; cnt3 = 0;
                for (int j = 0; j < prob.Count; j++)
                {
                    if (cnt1 < (0.7 * subsetSize) && prob.Y[j] == -1) //select 70% negative instance (i.e. ham) of the subset
                    {
                        xn[cnt3] = rnd.Next(0, 2);
                        //xn[cnt3] = 0;
                        pointers[cnt3] = rNum[j];
                        k++; cnt1++; cnt3++;
                    }
                    else if (cnt2 < (0.3 * subsetSize) && prob.Y[j] == 1)
                    {
                        xn[cnt3] = rnd.Next(0, 2);
                        //xn[cnt3] = 0;
                        pointers[cnt3] = rNum[j];
                        k++; cnt2++; cnt3++;
                    }
                    if (cnt3 >= subsetSize)
                    {
                        break;
                    }
                }

                ObjectInstanceSelection OI = new ObjectInstanceSelection(xn, xn_Con, freq, vel, pointers, 0.0);
                attr_values.Add(OI);
            }

            return(attr_values);
        }
        /// <summary>
        /// generating the initial locations of n fireflies
        /// </summary>
        public List <ObjectInstanceSelection> init_ffa(int nFF, int subsetSize, int probSize, Problem prob)
        {
            Random     rnd  = new Random();                                  // Random rx = new Random(); Random ry = new Random();
            List <int> rNum = Training.GetRandomNumbers(probSize, probSize); //generate N random numbers

            List <ObjectInstanceSelection> attr_values = new List <ObjectInstanceSelection>();
            int cnt1 = 0, cnt2 = 0, cnt3 = 0;

            //create an array of size n for x and y
            int[] xn = new int[subsetSize];       //instance mask
            int[] pointers = new int[subsetSize]; //array of pointers to the actual instances represented in the instance mask
            int   k = 0;

            for (int i = 0; i < nFF; i++)
            {
                xn       = new int[subsetSize];
                pointers = new int[subsetSize];
                cnt1     = 0; cnt2 = 0; cnt3 = 0;
                for (int j = 0; j < prob.Count; j++)
                {
                    if (cnt1 < (0.7 * subsetSize) && prob.Y[j] == 1) //select 70% positive instance of the subset
                    {
                        xn[cnt3]       = rnd.Next(0, 2);
                        pointers[cnt3] = rNum[k];
                        k++; cnt1++; cnt3++;
                    }
                    else if (cnt2 < (0.3 * subsetSize) && prob.Y[j] == -1)
                    {
                        xn[cnt3]       = rnd.Next(0, 2);
                        pointers[cnt3] = rNum[k];
                        k++; cnt2++; cnt3++;
                    }
                    if (cnt3 >= subsetSize)
                    {
                        break;
                    }
                }

                ObjectInstanceSelection OI = new ObjectInstanceSelection(0.0, 0.0, xn, pointers);
                attr_values.Add(OI);
            }

            return(attr_values);
        }
        /// <summary>
        /// This method ensures that the C and Gamma values do not go beyond the specified range
        /// </summary>
        public void findrange(ObjectInstanceSelection fireflies, double minC, double maxC, double minG, double maxG)
        {
            if ((double)fireflies.cValue <= minC)
            {
                fireflies.cValue = minC;
            }
            if ((double)fireflies.cValue >= maxC)
            {
                fireflies.cValue = maxC;
            }
            if ((double)fireflies.GValue <= minG)
            {
                fireflies.GValue = minG;
            }
            if ((double)fireflies.GValue >= maxG)
            {
                fireflies.GValue = maxG;
            }
        }
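The same bound check can be expressed as a single clamp helper; on newer runtimes this is essentially Math.Clamp(value, min, max). A sketch (Clamp is a hypothetical helper name):

        //Sketch: clamp a value into [min, max]; equivalent to the pairwise checks in findrange
        public double Clamp(double value, double min, double max)
        {
            if (value <= min)
            {
                return min;
            }
            if (value >= max)
            {
                return max;
            }
            return value;
        }

With it, findrange reduces to two assignments, e.g. fireflies.cValue = Clamp((double)fireflies.cValue, minC, maxC) and fireflies.GValue = Clamp((double)fireflies.GValue, minG, maxG).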
Example #9
        public Problem CuckooSearch(Problem prob, out double storagePercentage)
        {
            int    nNests         = 5;    //number of nests, or number of solutions
            int    subsetSize     = 100;
            int    maxGen         = 5;    //maximum generation
            double discoveryRate  = 0.25; //discovery rate of alien eggs
            double tolerance      = Math.Exp(-5);
            int    lowerBound     = -5;
            int    upperBound     = 5;
            int    totalInstances = prob.X.Count(); //problem size

            double[] cuckooFitnessVal                = new double[nNests];
            double[] newCuckooFitnessVal             = new double[nNests];
            ObjectInstanceSelection globalBestCuckoo = null;
            double globalBest = double.MinValue;
            Random rand       = new Random();

            FlowerPollinationAlgorithm fpa = new FlowerPollinationAlgorithm();

            //initialize population
            List <ObjectInstanceSelection> cuckoos    = InitializeBinaryCuckoo(nNests, subsetSize, totalInstances, prob);
            List <ObjectInstanceSelection> newCuckoos = new List <ObjectInstanceSelection>(cuckoos.Count); //create a clone of the cuckoos

            cuckoos.ForEach((item) =>
            {
                newCuckoos.Add(new ObjectInstanceSelection(item.Attribute_Values, item.Attribute_Values_Continuous, item.Pointers, item.Fitness)); //clone each cuckoo
            });

            cuckooFitnessVal    = EvaluateObjectiveFunction(cuckoos, prob);                                                                //evaluate fitness value for all the cuckoos
            newCuckooFitnessVal = EvaluateObjectiveFunction(newCuckoos, prob);                                                             //evaluate fitness value for the new cuckoos. Note: identical at this point, since no search step has occurred yet
            CuckooFitness(cuckooFitnessVal, cuckoos);                                                                                      //fitness value for each cuckoo
            CuckooFitness(newCuckooFitnessVal, newCuckoos);                                                                                //fitness value for the new cuckoos
            globalBestCuckoo = EvaluateSolution(cuckooFitnessVal, newCuckooFitnessVal, globalBest, cuckoos, newCuckoos, globalBestCuckoo); //get the global best cuckoo
            globalBest       = globalBestCuckoo.__Fitness;

            //generate new solutions
            double beta  = 3.0 / 2; //Levy flight exponent (3/2); use floating-point division so beta is 1.5, not 1
            double A     = fp.Gamma(1 + beta) * Math.Sin(Math.PI * (beta / 2));
            double B     = fp.Gamma((1 + beta) / 2) * beta;
            double C     = (beta - 1) / 2;
            double D     = Math.Pow(2, C);
            double E     = A / (B * D);
            double sigma = Math.Pow(E, (1 / beta));

            double F;
            double G;
            double step;
            double stepSize;
            int    x = 0;

            for (int i = 0; i <= maxGen; i++)
            {
                for (int j = 0; j < nNests; j++)
                {
                    for (int k = 0; k < subsetSize; k++)
                    {
                        F    = SimpleRNG.GetNormal() * sigma;
                        G    = SimpleRNG.GetNormal();
                        step = F / Math.Pow(Math.Abs(G), (1 / beta));

                        //In the next equation, the difference factor (s - best) means that the best solution remains unchanged.
                        //The factor 0.01 comes from the fact that L/100 should be the typical step size of walks/flights, where L is the typical length scale;
                        //otherwise, Levy flights become too aggressive and new solutions jump outside the design domain, wasting evaluations.
                        stepSize = 0.01 * step * (cuckoos[j].Attribute_Values[k] - globalBestCuckoo.Attribute_Values[k]);

                        //Now the actual random walk or Levy flight
                        newCuckoos[j].Attribute_Values[k] = fi.Binarize((newCuckoos[j].Attribute_Values[k] + stepSize) * SimpleRNG.GetNormal(), rand.NextDouble());

                        if (cuckoos[j].Attribute_Values[k] == 1 && newCuckoos[j].Attribute_Values[k] == 0)
                        {
                            x++;
                        }
                    }
                }

                //discovery and randomization - replace some nest by constructing new solutions
                newCuckoos = EmptyNest(cuckoos, newCuckoos, discoveryRate, subsetSize, nNests);

                //Select best solutions from the original population and matured population for the next generation;
                fpa.SelectBestSolution(cuckoos, newCuckoos);

                //evaluate new solution
                newCuckooFitnessVal = EvaluateObjectiveFunction(newCuckoos, prob);                                                             //evaluate fitness value for the new cuckoos
                CuckooFitness(newCuckooFitnessVal, newCuckoos);                                                                                //fitness value for the new cuckoos
                globalBestCuckoo = EvaluateSolution(cuckooFitnessVal, newCuckooFitnessVal, globalBest, cuckoos, newCuckoos, globalBestCuckoo); //get the global best cuckoo
                globalBest       = globalBestCuckoo.Fitness;

                //if the solution has converged to the user-defined optimum, stop the search
                int Max = 60;          // maximum percentage reduction
                if (globalBest >= Max) //if the percentage reduction has approached 60%, stop search!
                {
                    break;
                }
            }

            //ensure that at least N instances are selected for classification
            int min = 40; //minimum number of selected instances

            globalBestCuckoo = fpa.AddInstances(globalBestCuckoo, min);

            Problem subBest = fi.buildModelMultiClass(globalBestCuckoo, prob); //build model for the best instance mask

            storagePercentage = Training.StoragePercentage(subBest, prob);     //calculate the percentage of the original training set retained by the reduction algorithm
            return(subBest);
        }
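The sigma computation in CuckooSearch follows Mantegna's algorithm for Levy-stable step lengths. Isolated as a helper, it might look like the sketch below; LevyStepSize is a hypothetical name, Gamma stands for a gamma function such as fp.Gamma, and stdNormal for a standard-normal draw such as SimpleRNG.GetNormal used above:

        //Sketch: draw one Levy-distributed step length with exponent beta (typically 1.5), following Mantegna's algorithm
        public double LevyStepSize(Func <double, double> Gamma, Func <double> stdNormal, double beta)
        {
            double numerator   = Gamma(1 + beta) * Math.Sin(Math.PI * beta / 2);
            double denominator = Gamma((1 + beta) / 2) * beta * Math.Pow(2, (beta - 1) / 2);
            double sigma       = Math.Pow(numerator / denominator, 1 / beta); //scale applied to the numerator sample

            double u = stdNormal() * sigma; //numerator sample
            double v = stdNormal();         //denominator sample
            return u / Math.Pow(Math.Abs(v), 1 / beta);
        }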
Example #10
        public Problem Bat(Problem prob)
        {
            //default parameters
            int    populationSize = 5; //number of bats in the population
            int    maxGeneration  = 100;
            int    subsetSize     = 200;
            double loudness       = 0.5;
            double pulseRate      = 0.5;
            int    totalInstances = prob.X.Count(); //problem size
            double frequencyMin   = 0;              //minimum frequency. The frequency range determines the scaling
            double frequencyMax   = 2;              //maximum frequency.
            int    lowerBound     = -2;             //set lower bound - lower boundary
            int    upperBound     = 2;              //set upper bound - upper boundary

            double[] batFitnessVal                = new double[populationSize];
            double[] newbatFitnessVal             = new double[populationSize];
            double   globalBest                   = double.MinValue;
            ObjectInstanceSelection globalBestBat = null;
            Random r = new Random();

            //initialize population
            List <ObjectInstanceSelection> bats    = InitializeBat(populationSize, subsetSize, totalInstances, prob);
            List <ObjectInstanceSelection> newBats = new List <ObjectInstanceSelection>(bats.Count); //create a clone of bats

            bats.ForEach((item) =>
            {
                newBats.Add(new ObjectInstanceSelection(item.__Attribute_Values, item.__Attribute_Values_Continuous, item.__Frequency, item.__Velocity, item.__Pointers, item.__Fitness)); //clone each bat
            });

            batFitnessVal    = fi.EvaluateObjectiveFunction(bats, prob);                                                           //evaluate fitness value for all the bats
            newbatFitnessVal = fi.EvaluateObjectiveFunction(newBats, prob);                                                        //evaluate fitness value for the new bats. Note: identical at this point, since no search step has occurred yet
            BatFitness(batFitnessVal, bats);                                                                                       //fitness value for each bat
            BatFitness(newbatFitnessVal, newBats);                                                                                 //fitness value for the new bats
            globalBestBat = EvaluateSolution(batFitnessVal, newbatFitnessVal, globalBest, bats, newBats, globalBestBat, loudness); //get the global best bat
            globalBest    = globalBestBat.__Fitness;

            //start bat algorithm
            double rand = r.NextDouble(); //generate random number

            for (int i = 0; i < maxGeneration; i++)
            {
                //loop over all bats or solutions
                for (int j = 0; j < populationSize; j++)
                {
                    bats[j].__Frequency = frequencyMin + (frequencyMax - frequencyMin) * rand; //adjust frequency: f = fmin + (fmax - fmin) * rand
                    for (int k = 0; k < subsetSize; k++)
                    {
                        double randNum = SimpleRNG.GetNormal();                                                                                                                             //generate random number with normal distribution
                        newBats[j].__Velocity[k] = bats[j].__Velocity[k] + (bats[j].__Attribute_Values_Continuous[k] - globalBestBat.Attribute_Values_Continuous[k]) * bats[j].__Frequency; //update velocity
                        newBats[j].__Attribute_Values_Continuous[k] = bats[j].__Attribute_Values_Continuous[k] + bats[j].__Velocity[k];                                                     //update bat position in continuous space
                        newBats[j].__Attribute_Values_Continuous[k] = SimpleBounds(newBats[j].__Attribute_Values_Continuous[k], lowerBound, upperBound);                                    //ensure that value does not go beyond defined boundary

                        if (rand > pulseRate)                                                                                                                                               //The factor 0.001 limits the step sizes of random walks
                        {
                            newBats[j].__Attribute_Values_Continuous[k] = globalBestBat.Attribute_Values_Continuous[k] + 0.001 * randNum;
                        }

                        newBats[j].__Attribute_Values[k] = fi.Binarize(newBats[j].__Attribute_Values_Continuous[k], r.NextDouble()); //convert to binary
                    }
                }

                //evaluate new solution
                newbatFitnessVal = fi.EvaluateObjectiveFunction(newBats, prob);                                                        //evaluate fitness value for all the bats
                BatFitness(newbatFitnessVal, newBats);                                                                                 //fitness value for new bats
                globalBestBat = EvaluateSolution(batFitnessVal, newbatFitnessVal, globalBest, bats, newBats, globalBestBat, loudness); //get the global best flower
                globalBest    = globalBestBat.__Fitness;
            }

            //ensure that at least 40 instances are selected for classification
            int countSelected = globalBestBat.__Attribute_Values.Count(q => q == 1); //count the total number of selected instances
            int diff, c = 0, d = 0;
            int Min = 40;                                                            //minimum number of selected instances

            if (countSelected < Min)
            {
                //if fewer than Min instances are selected, add (Min - countSelected) more
                diff = Min - countSelected;
                while (c < diff)
                {
                    if (globalBestBat.__Attribute_Values[d] == 1) //skip positions that are already selected
                    {
                        d++;
                        continue;
                    }
                    else //select positions whose instance mask is 0
                    {
                        globalBestBat.__Attribute_Values[d] = 1;
                        c++; d++;
                    }
                }
            }

            Problem subBest = fi.buildModel(globalBestBat, prob); //build model for the best instance mask

            return(subBest);
        }
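For reference, the frequency/velocity/position update used inside the generation loop of Bat can be written out in one place. The sketch below uses illustrative names only and clamps the position with the same idea as SimpleBounds:

        //Sketch of the standard bat update for one dimension: f = fmin + (fmax - fmin) * rand,
        //v = v + (x - xBest) * f, x = x + v, then clamp x to [lowerBound, upperBound]
        public void BatUpdateStep(ref double velocity, ref double position, double globalBestPosition,
                                  double frequencyMin, double frequencyMax, double rand, double lowerBound, double upperBound)
        {
            double frequency = frequencyMin + (frequencyMax - frequencyMin) * rand; //adjust frequency
            velocity += (position - globalBestPosition) * frequency;                //update velocity
            position += velocity;                                                   //update position
            position  = Math.Max(lowerBound, Math.Min(upperBound, position));       //keep position within the boundary
        }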
Example #11
        //Binary Bat
        public Problem BinaryBat(Problem prob, out double storagePercentage)
        {
            //default parameters
            int    populationSize = 3; //number of bats in the population
            int    subsetSize     = 100;
            int    maxGeneration  = 3;
            double loudness       = 0.5;
            double pulseRate      = 0.5;
            int    totalInstances = prob.X.Count(); //problem size
            double frequencyMin   = 0;              //minimum frequency. The frequency range determines the scaling
            double frequencyMax   = 2;              //maximum frequency.
            int    lowerBound     = -2;             //set lower bound - lower boundary
            int    upperBound     = 2;              //set upper bound - upper boundary

            double[] batFitnessVal                = new double[populationSize];
            double[] newbatFitnessVal             = new double[populationSize];
            double   globalBest                   = double.MinValue;
            ObjectInstanceSelection globalBestBat = null;
            Random r = new Random();
            FlowerPollinationAlgorithm fpa = new FlowerPollinationAlgorithm();

            //initialize population
            List <ObjectInstanceSelection> bats    = InitializeBinaryBat(populationSize, subsetSize, totalInstances, prob);
            List <ObjectInstanceSelection> newBats = new List <ObjectInstanceSelection>(bats.Count); //create a clone of bats

            bats.ForEach((item) =>
            {
                newBats.Add(new ObjectInstanceSelection(item.Attribute_Values, item.Attribute_Values_Continuous, item.Frequency, item.Velocity, item.Pointers, item.Fitness)); //clone each bat
            });

            batFitnessVal    = EvaluateObjectiveFunction(bats, prob);                                                              //evaluate fitness value for all the bats
            newbatFitnessVal = EvaluateObjectiveFunction(newBats, prob);                                                           //evaluate fitness value for the new bats. Note: identical at this point, since no search step has occurred yet
            BatFitness(batFitnessVal, bats);                                                                                       //fitness value for each bat
            BatFitness(newbatFitnessVal, newBats);                                                                                 //fitness value for the new bats
            globalBestBat = EvaluateSolution(batFitnessVal, newbatFitnessVal, globalBest, bats, newBats, globalBestBat, loudness); //get the global best bat
            globalBest    = globalBestBat.Fitness;

            //start bat algorithm
            double rand = r.NextDouble(); //generate random number

            for (int i = 0; i < maxGeneration; i++)
            {
                //loop over all bats or solutions
                for (int j = 0; j < populationSize; j++)
                {
                    for (int k = 0; k < subsetSize; k++)
                    {
                        bats[j].Frequency = frequencyMin + (frequencyMax - frequencyMin) * r.NextDouble();                                               //adjust frequency: f = fmin + (fmax - fmin) * rand
                        double randNum = SimpleRNG.GetNormal();                                                                                                  //generate random number with normal distribution
                        newBats[j].Velocity[k] = newBats[j].Velocity[k] + (bats[j].Attribute_Values[k] - globalBestBat.Attribute_Values[k]) * bats[j].Frequency; //update velocity
                        //newBats[j].Attribute_Values[k] = fpa.ConvertToBinary(newBats[j].Velocity[k], newBats[j].Attribute_Values[k]); //update bat position in the binary space
                        newBats[j].Attribute_Values[k] = TransferFunction(newBats[j].Velocity[k], newBats[j].Attribute_Values[k]);                               //update bat position in the binary space

                        if (rand > pulseRate)
                        {
                            newBats[j].Attribute_Values[k] = globalBestBat.Attribute_Values[k]; //replace some dimensions of the position vector with the corresponding dimensions of the global best. Refer to the reference for more explanation
                        }
                    }
                }

                //Select best solutions from the original population and matured population for the next generation;
                fpa.SelectBestSolution(bats, newBats);

                //evaluate new solution
                newbatFitnessVal = EvaluateObjectiveFunction(newBats, prob);                                                           //evaluate fitness value for all the bats
                BatFitness(newbatFitnessVal, newBats);                                                                                 //fitness value for new bats
                globalBestBat = EvaluateSolution(batFitnessVal, newbatFitnessVal, globalBest, bats, newBats, globalBestBat, loudness); //get the global best bat
                globalBest    = globalBestBat.Fitness;

                //if the solution has converged to the user-defined optimum, stop the search
                int Max = 60;          // maximum percentage reduction
                if (globalBest >= Max) //if the percentage reduction has approached 60%, stop search!
                {
                    break;
                }
            }

            //ensure that at least N instances are selected for classification
            int min = 15; //minimum number of selected instances

            globalBestBat = fpa.AddInstances(globalBestBat, min);

            Problem subBest = fi.buildModelMultiClass(globalBestBat, prob); //build model for the best instance mask

            storagePercentage = Training.StoragePercentage(subBest, prob);  //calculate the percentage of the original training set retained by the reduction algorithm
            return(subBest);
        }
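The TransferFunction called in BinaryBat is not shown here. A common choice in binary bat algorithms is a V-shaped transfer function that flips a bit with a probability derived from the velocity; the sketch below is only an assumption about what such a function could look like, not the project's actual implementation:

        //Sketch (assumed, not the actual TransferFunction): V-shaped transfer rule for a binary bat.
        //Map the velocity to a probability in [0, 1) and flip the current bit with that probability.
        public int VShapedTransfer(double velocity, int currentBit, double uniformRandom)
        {
            double probability = Math.Abs(Math.Tanh(velocity)); //V-shaped transfer function
            return uniformRandom < probability ? 1 - currentBit : currentBit;
        }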
        public Problem SocialSpider(Problem prob, out double storagePercentage)
        {
            int    nSpiders = 5; //population size of spiders
            int    subsetSize = 100;
            int    totalInstances = prob.X.Count(); //problem size
            int    bound = 100, maxGen = 5;
            double r_a  = 1;   //This parameter controls the attenuation rate of the vibration intensity over distance
            double p_c  = 0.7; // p_c describes the probability of changing mask of spider
            double p_m  = 0.1; // This is also a user-controlled parameter defined in (0, 1). It controls the probability of assigning a one or zero to each bit of a mask
            bool   info = true;

            double[][] globalBestPosition = new double[1][];
            double[]   targetIntensity    = new double[nSpiders]; //best vibration for each spider
            //double[] targetPosition = new double[nSpiders]; //target position for each spider
            double[,] mask     = new double[nSpiders, subsetSize];
            double[,] newMask  = new double[nSpiders, subsetSize];
            double[,] movement = new double[nSpiders, subsetSize];
            double[] inactive                        = new double[nSpiders];
            double[] spiderFitnessVal                = new double[nSpiders];
            double[] newSpiderFitnessVal             = new double[nSpiders];
            ObjectInstanceSelection globalBestSpider = null;
            double globalBest                        = double.MinValue;
            Random rand = new Random();
            FlowerPollinationAlgorithm fpa = new FlowerPollinationAlgorithm();

            //initialize population
            List <ObjectInstanceSelection> spiders    = InitializeBinarySpider(nSpiders, subsetSize, totalInstances, prob);
            List <ObjectInstanceSelection> newSpiders = new List <ObjectInstanceSelection>(spiders.Count); //create a clone of the spiders

            spiders.ForEach((item) =>
            {
                newSpiders.Add(new ObjectInstanceSelection(item.Attribute_Values, item.Attribute_Values_Continuous, item.Pointers, item.Fitness, item.Position)); //clone each spider
            });

            spiderFitnessVal    = EvaluateObjectiveFunction(spiders, prob);                                                                //evaluate fitness value for all the spiders
            newSpiderFitnessVal = EvaluateObjectiveFunction(newSpiders, prob);                                                             //evaluate fitness value for the new spiders. Note: identical at this point, since no search step has occurred yet
            SpiderFitness(spiderFitnessVal, spiders);                                                                                      //fitness value for each spider
            SpiderFitness(newSpiderFitnessVal, newSpiders);                                                                                //fitness value for the new spiders
            globalBestSpider = EvaluateSolution(spiderFitnessVal, newSpiderFitnessVal, globalBest, spiders, newSpiders, globalBestSpider); //get the global best spider
            globalBest       = globalBestSpider.Fitness;

            double[]        standDev        = new double[subsetSize];
            List <double>   listPositions   = new List <double>();
            List <double[]> spiderPositions = new List <double[]>();

            //calculate the standard deviation of all spider positions
            for (int a = 0; a < subsetSize; a++)
            {
                double[] sPositions = new double[nSpiders];
                for (int b = 0; b < nSpiders; b++)
                {
                    sPositions[b] = spiders[b].Attribute_Values[a]; //get all spider positions column wise
                    //sPositions[b] = spiders[b].Attribute_Values_Continuous[a]; //get all spider positions column wise
                }
                spiderPositions.Add(sPositions); //save positions in list
            }

            for (int a = 0; a < subsetSize; a++)
            {
                standDev[a] = getStandardDeviation(spiderPositions[a].ToList()); //calculate standard deviation for each spider solution
            }
            double baseDistance = standDev.Average();                            //calculate the mean of the standard deviations

            //compute paired euclidean distances of all vectors in spider; similar to pdist function in matlab. Reference: http://www.mathworks.com/help/stats/pdist.html
            int n = (nSpiders * (nSpiders - 1)) / 2; //total number of elements in the pairwise distance array

            double[] euclidenDist = new double[n];   //array of paired Euclidean distances, similar to MATLAB's pdist() function
            int      kk           = 0;

            for (int i = 0; i < nSpiders; i++)
            {
                for (int j = 1 + i; j < nSpiders; j++)
                {
                    //this distance is in pairs -> 1,0; 2,0; 3,0,...n,0; 2,1; 3,1; 4,1,...n,1;.... It is similar to pdist function in matlab
                    //euclidenDist[kk++] = computeEuclideanDistance(spiders[j].Attribute_Values_Continuous, spiders[i].Attribute_Values_Continuous); //generate a vibration for each spider position
                    euclidenDist[kk++] = computeEuclideanDistance(spiders[j].Attribute_Values, spiders[i].Attribute_Values); //generate a vibration for each spider position
                    //distance[i][j] = computeEuclideanDistance(spiders[i].Attribute_Values, spiders[j].Attribute_Values);
                }
            }

            double[,] distance = SquareForm(euclidenDist, nSpiders); //convert the pairwise distances to a square matrix, similar to MATLAB's squareform() function
            //double[,] intensityReceive = new double[nSpiders, nSpiders];
            double[][] intensityReceive = new double[nSpiders][];

            for (int a = 0; a < maxGen; a++)
            {
                for (int j = 0; j < nSpiders; j++)
                {
                    //calculate the intensity for all the generated vibrations
                    intensityReceive[j] = new double[nSpiders];
                    double A = (spiders[j].Fitness + Math.Exp(-100)) + 1;
                    double intensitySource = Math.Log(1 / A);
                    for (int k = 0; k < nSpiders; k++)
                    {
                        double intensityAttenuation = Math.Exp(-distance[j, k] / (baseDistance * r_a));
                        //intensityReceive[j, k] = intensitySource * intensityAttenuation; //intensity for each spider vibration
                        intensityReceive[j][k] = intensitySource * intensityAttenuation; //intensity for each spider vibration
                    }
                }

                //select strongest vibration from intensity
                int row    = intensityReceive.GetLength(0);
                int column = intensityReceive[0].Count();
                //IEnumerable<double> bestReceive = Enumerable.Range(0, row).Select(i => Enumerable.Range(0, column).Select(j => intensityReceive[i, j]).Max()); //get the max value in each row
                IEnumerable <double> bestReceive = Enumerable.Range(0, row).Select(i => Enumerable.Range(0, column).Select(j => intensityReceive[i][j]).Max()); //get the max value in each row

                //IEnumerable<int> bestReceiveIndex = Enumerable.Range(0, row).Select(i => Enumerable.Range(0, column).Select(j => intensityReceive[i, j]).Max()); //get the max value in each row

                //get the index of the strongest vibration
                int[] maxIndex = new int[nSpiders];
                for (int i = 0; i < nSpiders; i++)
                {
                    maxIndex[i] = Array.IndexOf(intensityReceive[i], bestReceive.ElementAt(i));
                }

                //Store the current best vibration
                int[] keepTarget = new int[nSpiders];
                int[] keepMask   = new int[nSpiders];
                double[,] targetPosition = new double[nSpiders, subsetSize];
                for (int i = 0; i < nSpiders; i++)
                {
                    if (bestReceive.ElementAt(i) <= targetIntensity[i])
                    {
                        keepTarget[i] = 1;
                    }

                    inactive[i]        = inactive[i] * keepTarget[i] + keepTarget[i];
                    targetIntensity[i] = (targetIntensity[i] * keepTarget[i]) + bestReceive.ElementAt(i) * (1 - keepTarget[i]);


                    if (rand.NextDouble() < Math.Pow(p_c, inactive[i]))
                    {
                        keepMask[i] = 1;
                    }
                    inactive[i] = inactive[i] * keepMask[i];

                    for (int j = 0; j < subsetSize; j++)
                    {
                        //newSpiders[i].Attribute_Values[j] = fi.Binarize(newSpiders[i].Attribute_Values[j] * spiders[maxIndex[i]].Attribute_Values[j] * (1 - keepTarget[i]), rand.NextDouble()); //update solution
                        targetPosition[i, j] = targetPosition[i, j] * keepTarget[i] + spiders[maxIndex[i]].Attribute_Values[j] * (1 - keepTarget[i]);
                        //targetPosition[i, j] = targetPosition[i, j] * keepTarget[i] + spiders[maxIndex[i]].Attribute_Values_Continuous[j] * (1 - keepTarget[i]);
                        newMask[i, j] = Math.Ceiling(rand.NextDouble() + rand.NextDouble() * p_m - 1);
                        mask[i, j]    = keepMask[i] * mask[i, j] + (1 - keepMask[i]) * newMask[i, j]; //update dimension mask of spider
                    }
                }

                //Reshuffle the spider solution
                //Method: randomly generate positions pointing to rows and columns in the solution space. With these pointers, we can access individual indices (or positions) in the solution
                double[,] randPosition = GenerateRandomSpiderPosition(nSpiders, subsetSize, spiders);

                //generate the following position (psfo) and perform a random walk
                double[,] followPosition = new double[nSpiders, subsetSize];
                for (int i = 0; i < nSpiders; i++)
                {
                    for (int j = 0; j < subsetSize; j++)
                    {
                        followPosition[i, j] = mask[i, j] * randPosition[i, j] + (1 - mask[i, j]) * targetPosition[i, j];
                        movement[i, j]       = rand.NextDouble() * movement[i, j] + (followPosition[i, j] - spiders[i].Attribute_Values[j]) * rand.NextDouble(); //perform random movement
                        //movement[i, j] = rand.NextDouble() * movement[i, j] + (followPosition[i, j] - spiders[i].Attribute_Values_Continuous[j]) * rand.NextDouble(); //perform random movement
                        //newSpiders[i].Attribute_Values[j] = fi.Binarize(newSpiders[i].Attribute_Values_Continuous[j] + movement[i, j], rand.NextDouble()); //actual random walk
                        newSpiders[i].Attribute_Values[j] = fi.Binarize(newSpiders[i].Attribute_Values[j] + movement[i, j], rand.NextDouble()); //actual random walk
                    }
                }

                //Select best solutions from the original population and matured population for the next generation;
                fpa.SelectBestSolution(spiders, newSpiders);

                //evaluate new solution
                newSpiderFitnessVal = EvaluateObjectiveFunction(newSpiders, prob);                                                             //evaluate fitness value for the new spiders
                SpiderFitness(newSpiderFitnessVal, newSpiders);                                                                                //fitness value for the new spiders
                globalBestSpider = EvaluateSolution(spiderFitnessVal, newSpiderFitnessVal, globalBest, spiders, newSpiders, globalBestSpider); //get the global best spider
                globalBest       = globalBestSpider.Fitness;

                //if the solution has converged to the user-defined optimum, stop the search
                int Max = 60;          // maximum percentage reduction
                if (globalBest >= Max) //if the percentage reduction has approached 60%, stop search!
                {
                    break;
                }
            }

            //ensure that at least N instances are selected for classification
            int Min = 15; //minimum number of selected instances

            globalBestSpider = fpa.AddInstances(globalBestSpider, Min);

            Problem subBest = fi.buildModelMultiClass(globalBestSpider, prob); //build model for the best instance mask

            storagePercentage = Training.StoragePercentage(subBest, prob);     //calculate the percentage of the original training set retained by the reduction algorithm
            return(subBest);
        }
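The SquareForm helper used in SocialSpider is not shown. Assuming it mirrors MATLAB's squareform (expanding a condensed pdist-style vector into a symmetric matrix), a sketch could be:

        //Sketch (assumed behaviour of SquareForm): expand a condensed pairwise-distance vector,
        //stored in pdist order (0,1), (0,2), ..., (0,n-1), (1,2), ..., into a symmetric n x n matrix
        public double[,] SquareFormSketch(double[] condensed, int n)
        {
            double[,] square = new double[n, n]; //diagonal stays zero
            int k = 0;
            for (int i = 0; i < n; i++)
            {
                for (int j = i + 1; j < n; j++)
                {
                    square[i, j] = condensed[k];
                    square[j, i] = condensed[k];
                    k++;
                }
            }
            return square;
        }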
Example #13
        /// <summary>
        /// generating the initial locations of n Cuckoo
        /// </summary>
        public List <ObjectInstanceSelection> InitializeBinaryCuckoo(int nNests, int subsetSize, int probSize, Problem prob)
        {
            //Random rnd = new Random();
            //List<int> rNum = Training.GetRandomNumbers(probSize, probSize); //generate N random numbers

            List <ObjectInstanceSelection> attr_values = new List <ObjectInstanceSelection>();
            //int cnt1 = 0, cnt2 = 0, cnt3 = 0;
            //create an array of size n for x and y
            Random rnd = new Random();

            //List<int> rNum = Training.GetRandomNumbers(probSize, probSize); //generate N random numbers
            int[]    xn     = new int[subsetSize];    //instance mask
            double[] xn_Con = new double[subsetSize]; //instance mask continuous

            //int[] pointers = new int[subsetSize]; //array contain pointer to actual individual instance represented in the instance mask
            List <double> classes = fi.getClassLabels(prob.Y); //get the class labels
            int           nClass  = classes.Count;
            int           div     = subsetSize / nClass;

            //double freq = new double(); //initialize the frequency of all the bats to zero
            //double[] vel = new double[subsetSize]; //initialize the velocity of all the bats to zero

            //select pointers to instances for all the particles


            //int k = 0;
            if (nClass > 2)                                                                              //do this for multi-class problems
            {
                int[] pointers = Training.AssignClassPointers_MultipleClass(prob, subsetSize, probSize); //array of pointers to the actual instances represented in the instance mask
                for (int a = 0; a < nNests; a++)
                {
                    xn     = new int[subsetSize];    //instance mask
                    xn_Con = new double[subsetSize]; //instance mask continuous

                    for (int j = 0; j < subsetSize; j++)
                    {
                        xn[j] = rnd.Next(0, 2);
                    }

                    //Training.InstanceMask_MultipleClass(prob, subsetSize, probSize, out xn); //initialize instance mask
                    ObjectInstanceSelection OI = new ObjectInstanceSelection(xn, xn_Con, pointers, 0.0);
                    attr_values.Add(OI);
                }
            }
            else //do this for binary class problem
            {
                int[] pointers = Training.AssignClassPointersBinary(prob, probSize, subsetSize); //array of pointers to the actual instances represented in the instance mask
                for (int i = 0; i < nNests; i++)
                {
                    xn     = new int[subsetSize];
                    xn_Con = new double[subsetSize];
                    //pointers = new int[subsetSize];
                    //cnt1 = 0; cnt2 = 0; cnt3 = 0;

                    for (int j = 0; j < subsetSize; j++)
                    {
                        xn[j] = rnd.Next(0, 2);
                    }

                    //Training.InstanceMask_Binary(prob, subsetSize, pointers, out xn);
                    ObjectInstanceSelection OI = new ObjectInstanceSelection(xn, xn_Con, pointers, 0.0);
                    attr_values.Add(OI);

                    //for (int j = 0; j < prob.Count; j++)
                    //{
                    //    if (cnt1 < (0.7 * subsetSize) && prob.Y[rNum[j]] == -1) //select 70% positive instance of the subset
                    //    {
                    //        xn[cnt3] = rnd.Next(0, 2);
                    //        pointers[cnt3] = rNum[j];
                    //        k++; cnt1++; cnt3++;
                    //    }
                    //    else if (cnt2 < (0.3 * subsetSize) && prob.Y[rNum[j]] == 1)
                    //    {
                    //        xn[cnt3] = rnd.Next(0, 2);
                    //        pointers[cnt3] = rNum[j];
                    //        k++; cnt2++; cnt3++;
                    //    }
                    //    if (cnt3 >= subsetSize)
                    //        break;
                    //}
                }
            }

            return(attr_values);
        }
Example #14
        //flower pollination algorithm by Yang
        public Problem FlowerPollination(Problem prob)
        {
            int    nargin = 0, totalInstances = prob.X.Count(), maxGeneration = 500;
            int    numOfFlower       = 10;  //population size
            double probabilitySwitch = 0.8; //assign probability switch
            int    subsetSize        = 200; //dimension for each flower

            double[] flowerFitnessVal    = new double[numOfFlower];
            double[] newFlowerFitnessVal = new double[numOfFlower];
            FireflyInstanceSelection fw  = new FireflyInstanceSelection();
            double globalBest            = double.MinValue;
            double newBest = new double();
            ObjectInstanceSelection globalBestFlower = null;
            int lowerBound = -2; //set lower bound - lower boundary
            int upperBound = 2;  //set upper bound - upper boundary
            int maxIndex;

            //initialize flowers, and get the global best
            List <ObjectInstanceSelection> flowers    = InitializeFlower(numOfFlower, subsetSize, totalInstances, prob); //initialize solution
            List <ObjectInstanceSelection> newFlowers = new List <ObjectInstanceSelection>(flowers.Count);               //create a clone of flowers

            flowers.ForEach((item) =>
            {
                newFlowers.Add(new ObjectInstanceSelection(item.__Attribute_Values, item.__Attribute_Values_Continuous, item.__Pointers, item.__Fitness)); //create a clone of flowers
            });

            flowerFitnessVal    = fw.EvaluateObjectiveFunction(flowers, prob);                                                             //evaluate fitness value for all the flowers
            newFlowerFitnessVal = fw.EvaluateObjectiveFunction(newFlowers, prob);                                                          //evaluate fitness value for the new flowers. Note: identical at this point, since pollination has not occurred yet
            FlowerFitness(flowerFitnessVal, flowers);                                                                                      //fitness value for each flower
            FlowerFitness(newFlowerFitnessVal, newFlowers);                                                                                //fitness value for new flower
            globalBestFlower = EvaluateSolution(flowerFitnessVal, newFlowerFitnessVal, globalBest, flowers, newFlowers, globalBestFlower); //get the global best flower
            globalBest       = flowerFitnessVal.Max();

            //start flower algorithm
            Random r = new Random();

            double[] levy = new double[subsetSize];
            for (int i = 0; i < maxGeneration; i++)
            {
                double rand = r.NextDouble();
                if (rand > probabilitySwitch) //global pollination
                {
                    //global pollination
                    for (int j = 0; j < numOfFlower; j++)
                    {
                        levy = LevyFlight(subsetSize);
                        for (int k = 0; k < subsetSize; k++)
                        {
                            double A = levy[k] * (flowers[j].__Attribute_Values_Continuous[k] - globalBestFlower.__Attribute_Values_Continuous[k]);
                            double B = flowers[j].__Attribute_Values_Continuous[k] + A;
                            A = SimpleBounds(B, lowerBound, upperBound);                                     //ensure that value does not go beyond defined boundary
                            newFlowers[j].__Attribute_Values_Continuous[k] = A;
                            newFlowers[j].__Attribute_Values[k]            = fw.Binarize(B, r.NextDouble()); //convert to binary
                        }
                    }
                }
                else //local pollination
                {
                    for (int j = 0; j < numOfFlower; j++)
                    {
                        List <int> randNum = Training.GetRandomNumbers(2, numOfFlower); //generate 2 distinct random numbers
                        double     epsilon = rand;

                        //local pollination
                        for (int k = 0; k < subsetSize; k++)
                        {
                            double A = flowers[j].__Attribute_Values_Continuous[k] + epsilon * (flowers[randNum[0]].__Attribute_Values_Continuous[k] - flowers[randNum[1]].__Attribute_Values_Continuous[k]); //randomly select two flowers from neighbourhood for pollination
                            A = SimpleBounds(A, lowerBound, upperBound);                                                                                                                                      //ensure that value does not exceed defined boundary
                            newFlowers[j].__Attribute_Values_Continuous[k] = A;                                                                                                                               //save the computed continuous value
                            newFlowers[j].__Attribute_Values[k]            = fw.Binarize(A, r.NextDouble());                                                                                                  //convert to binary
                        }
                    }
                }

                //evaluate new solution
                newFlowerFitnessVal = fw.EvaluateObjectiveFunction(newFlowers, prob);                                                          //evaluate fitness value for all the flowers
                FlowerFitness(newFlowerFitnessVal, newFlowers);                                                                                //fitness value for new flower
                globalBestFlower = EvaluateSolution(flowerFitnessVal, newFlowerFitnessVal, globalBest, flowers, newFlowers, globalBestFlower); //Evaluate solution, update better solution and get global best flower
                globalBest       = flowerFitnessVal.Max();
            }

            //ensure that at least 40 instances are selected for classification
            int countSelected = globalBestFlower.__Attribute_Values.Count(q => q == 1); //count the total number of selected instances
            int diff, c = 0, d = 0;
            int Min = 40;                                                               //minimum number of selected instances

            if (countSelected < Min)
            {
                //if fewer than Min instances are selected, flip unselected positions until the shortfall is covered
                diff = Min - countSelected;
                while (c < diff)
                {
                    if (globalBestFlower.__Attribute_Values[d] == 1) //skip positions that are already selected
                    {
                        d++;
                        continue;
                    }
                    else //select instances at positions where the instance mask is 0
                    {
                        globalBestFlower.__Attribute_Values[d] = 1;
                        c++; d++;
                    }
                }
            }

            Problem subBest = fw.buildModel(globalBestFlower, prob); //build model for the best instance mask

            return(subBest);
        }
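The SimpleBounds helper called in the pollination loops above is not part of this listing. A minimal sketch of what it appears to do (clamp a pollinated value to the declared lower and upper boundary) could look like the following; the signature is inferred from the call sites, not taken from the original source:

        //clamp a continuous position to the search boundary [lowerBound, upperBound] (sketch, inferred from usage)
        public double SimpleBounds(double value, double lowerBound, double upperBound)
        {
            if (value < lowerBound)
            {
                return lowerBound; //value fell below the lower boundary
            }
            if (value > upperBound)
            {
                return upperBound; //value exceeded the upper boundary
            }
            return value; //value is already within bounds
        }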
Example #15
0
        //flower pollination algorithm by Yang
        public Problem BinaryFlowerPollination(Problem prob, out double storagePercentage)
        {
            int    nargin = 0, totalInstances = prob.X.Count();
            int    maxGeneration     = 3;
            int    numOfFlower       = 3;   //population size
            int    subsetSize        = 100; //dimension for each flower
            double probabilitySwitch = 0.8; //assign probability switch

            double[] flowerFitnessVal    = new double[numOfFlower];
            double[] newFlowerFitnessVal = new double[numOfFlower];

            double globalBest = double.MinValue;
            double newBest    = new double();
            ObjectInstanceSelection globalBestFlower = null;
            int lowerBound = -2; //set lower bound - lower boundary
            int upperBound = 2;  //set upper bound - upper boundary
            int maxIndex;

            //initialize flowers, and get global best
            List <ObjectInstanceSelection> flowers    = InitializeBinaryFlower(numOfFlower, subsetSize, totalInstances, prob); //initialize solution
            List <ObjectInstanceSelection> newFlowers = new List <ObjectInstanceSelection>(flowers.Count);                     //create a clone of flowers

            flowers.ForEach((item) =>
            {
                newFlowers.Add(new ObjectInstanceSelection(item.__Attribute_Values, item.__Attribute_Values_Continuous, item.__Pointers, item.__Fitness)); //create a clone of flowers
            });

            flowerFitnessVal    = EvaluateObjectiveFunction(flowers, prob);                                                                //evaluate fitness value for all the flowers
            newFlowerFitnessVal = EvaluateObjectiveFunction(newFlowers, prob);                                                             //evaluate fitness value for new flowers. Note: this will be the same for this function call, since pollination has not occurred yet
            FlowerFitness(flowerFitnessVal, flowers);                                                                                      //fitness value for each flower
            FlowerFitness(newFlowerFitnessVal, newFlowers);                                                                                //fitness value for new flower
            globalBestFlower = EvaluateSolution(flowerFitnessVal, newFlowerFitnessVal, globalBest, flowers, newFlowers, globalBestFlower); //get the global best flower
            globalBest       = flowerFitnessVal.Max();

            //start flower algorithm
            Random r = new Random();

            double[] levy = new double[subsetSize];
            for (int i = 0; i < maxGeneration; i++)
            {
                double rand = r.NextDouble();
                if (rand > probabilitySwitch) //do global pollination, to produce new pollen solution
                {
                    levy = LevyFlight(subsetSize);
                    for (int j = 0; j < numOfFlower; j++)
                    {
                        for (int k = 0; k < subsetSize; k++)
                        {
                            double A = levy[k] * (flowers[j].Attribute_Values[k] - globalBestFlower.Attribute_Values[k]);
                            double B = flowers[j].Attribute_Values[k] + A; //new pollen solution
                            //double A = levy[k] * (flowers[j].Attribute_Values_Continuous[k] - globalBestFlower.Attribute_Values_Continuous[k]);
                            //double B = flowers[j].Attribute_Values_Continuous[k] + A;
                            newFlowers[j].Attribute_Values[k] = ConvertToBinary(B, r.NextDouble()); //convert to binary

                            //newFlowers[j].__Attribute_Values[k] = TransferFunction(B, newFlowers[j].__Attribute_Values[k]); //update flower position in the binary space
                        }
                        List <int> randNum = Training.GetRandomNumbers(2, numOfFlower); //generate 2 distinct random numbers
                        for (int k = 0; k < subsetSize; k++)
                        {
                            double A = flowers[j].Attribute_Values[k] + (r.NextDouble() * (flowers[randNum[0]].Attribute_Values[k] - flowers[randNum[1]].Attribute_Values[k])); //randomly select two flowers from neighbourhood for pollination
                            //double A = flowers[j].Attribute_Values_Continuous[k] + r.NextDouble() * (flowers[randNum[0]].Attribute_Values_Continuous[k] - flowers[randNum[1]].Attribute_Values_Continuous[k]); //randomly select two flowers from neighbourhood for pollination
                            newFlowers[j].Attribute_Values[k] = ConvertToBinary(A, r.NextDouble());                                                                             //convert to binary

                            //newFlowers[j].__Attribute_Values[k] = TransferFunction(A, newFlowers[j].__Attribute_Values[k]); //update flower position in the binary space
                        }
                    }
                }
                else //do local pollination, to produce new pollen solution
                {
                    for (int j = 0; j < numOfFlower; j++)
                    {
                        List <int> randNum = Training.GetRandomNumbers(2, numOfFlower); //generate 2 distinct random numbers
                        for (int k = 0; k < subsetSize; k++)
                        {
                            double A = flowers[j].Attribute_Values[k] + r.NextDouble() * (flowers[randNum[0]].Attribute_Values[k] - flowers[randNum[1]].Attribute_Values[k]); //randomly select two flowers from neighbourhood for pollination
                            //double A = flowers[j].Attribute_Values_Continuous[k] + r.NextDouble() * (flowers[randNum[0]].Attribute_Values_Continuous[k] - flowers[randNum[1]].Attribute_Values_Continuous[k]); //randomly select two flowers from neighbourhood for pollination
                            newFlowers[j].Attribute_Values[k] = ConvertToBinary(A, r.NextDouble());                                                                           //convert to binary

                            //newFlowers[j].__Attribute_Values[k] = TransferFunction(A, newFlowers[j].__Attribute_Values[k]); //update flower position in the binary space
                        }
                    }
                }

                //Select best solutions from the original population and matured population for the next generation;
                SelectBestSolution(flowers, newFlowers);

                //evaluate new solution
                newFlowerFitnessVal = EvaluateObjectiveFunction(newFlowers, prob);                                                             //evaluate fitness value for all the flowers
                FlowerFitness(newFlowerFitnessVal, newFlowers);                                                                                //fitness value for new flower
                globalBestFlower = EvaluateSolution(flowerFitnessVal, newFlowerFitnessVal, globalBest, flowers, newFlowers, globalBestFlower); //Evaluate solution, update better solution and get global best flower
                globalBest       = globalBestFlower.Fitness;

                //if the solution has converged to a user-defined optimal point, stop the search
                int Max = 60;          // maximum percentage reduction
                if (globalBest >= Max) //if the percentage reduction has reached 60%, stop the search
                {
                    break;
                }
            }

            //ensure that at least N instances are selected for classification
            int min = 15; //minimum number of selected instances

            globalBestFlower = AddInstances(globalBestFlower, min);

            Problem subBest = fi.buildModelMultiClass(globalBestFlower, prob); //build model for the best instance mask

            storagePercentage = Training.StoragePercentage(subBest, prob);     //calculate the percentage of the original training set retained by the reduction algorithm
            return(subBest);
        }
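ConvertToBinary is also not shown in this listing. The sketch below assumes a standard sigmoid transfer function that thresholds against the supplied random number, which is one common way a binary flower pollination algorithm maps a continuous move onto a bit; the author's actual mapping may differ (compare fw.Binarize in the previous example):

        //map a continuous position to a bit with a sigmoid transfer function (sketch; the exact transfer function is an assumption)
        public int ConvertToBinary(double position, double rand)
        {
            double probability = 1.0 / (1.0 + Math.Exp(-position)); //sigmoid value in (0, 1)
            return rand < probability ? 1 : 0;                      //set the bit when the random draw falls below the sigmoid value
        }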
        //build model for multi class problems
        public Problem buildModelMultiClass(ObjectInstanceSelection firefly, Problem prob)
        {
            int           tNI = firefly.Attribute_Values.Count(); //size of each Instance Mask
            List <double> y = new List <double>();
            List <Node[]> x = new List <Node[]>();
            bool          pos = false, neg = false;
            List <double> classes = getClassLabels(prob.Y); //get the class labels
            int           nClass  = classes.Count;          //count the number of classes

            int[] classCount = new int[nClass];
            //build the subproblem from every instance whose bit is set in the firefly's instance mask
            for (int j = 0; j < tNI; j++)
            {
                if (firefly.__Attribute_Values[j] == 1) //if instance is selected, use for classification
                {
                    int p = firefly.__Pointers[j];
                    x.Add(prob.X[p]);
                    y.Add(prob.Y[p]);

                    for (int i = 0; i < nClass; i++)
                    {
                        if (prob.Y[p] == classes[i])
                        {
                            classCount[i]++; //count the total number of instances in each class
                        }
                    }
                }
                else
                {
                    continue;
                }
            }

            Node[][] X = new Node[x.Count][];
            double[] Y = new double[y.Count];

            //ensuring that the subproblem contains at least one instance of every class
            int k = 0;

            if (classCount.Sum() == 0) //if the sum is zero, then no instance was selected
            {
                return(null);
            }
            else //ensure that the instance mask contains at least one instance of each class
            {
                for (int a = 0; a < nClass; a++)
                {
                    if (classCount[a] == 0)
                    {
                        int m = 0;
                        for (int i = 0; i < prob.Count; i++) //if this class has no selected instance, search the full problem and insert up to two of its instances into the subproblem
                        {
                            if (prob.Y[i] == classes[a])
                            {
                                x[k] = prob.X[i]; //insert an instance of the missing class at position k
                                y[k] = prob.Y[i]; //insert its label
                                k++; m++;
                            }
                            if (m == 2)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            x.CopyTo(X); //convert from lists to arrays
            y.CopyTo(Y);
            Problem subProb = new Problem(X.Count(), Y, X, X[0].GetLength(0));

            return(subProb);
        }
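getClassLabels, used above to enumerate the classes in prob.Y, is not included in this listing. A minimal sketch consistent with its usage (returning the distinct labels as a List<double>, assuming prob.Y is a double[]) might be:

        //collect the distinct class labels from the label vector (sketch, inferred from usage)
        public List<double> getClassLabels(double[] labels)
        {
            List<double> classes = new List<double>();
            foreach (double label in labels)
            {
                if (!classes.Contains(label)) //add each label only once
                {
                    classes.Add(label);
                }
            }
            return classes;
        }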
        /// <summary>
        /// Main part of the Firefly Algorithm
        /// </summary>
        //public Problem firefly_simple(List<double> avgAcc, List<double> CValues, List<double> GValues, Problem prob)
        public Problem firefly_simple(Problem prob, out double storagePercentage)
        {
            //int nF = 9; //number of instances
            int nI            = prob.X.Count(); //total number of instance in dataset
            int nFF           = 5;              //number of fireflies. Note: NFF * subsetsize must not be greater than Size of training dataset
            int subsetSize    = 100;            //size of each firefly Instance Mask
            int MaxGeneration = 5;              //number of pseudo time steps

            int[] range = new int[4] {
                -5, 5, -5, 5
            };                  //range=[xmin xmax ymin ymax]

            double alpha = 0.2; //Randomness 0--1 (highly random)
            double gamma = 1.0; //Absorption coefficient

            int[]    xn     = new int[subsetSize];
            double[] xo     = new double[subsetSize];
            double[] Lightn = new double[nFF];
            double[] Lighto = new double[nFF];

            double[] fitnessVal = new double[nFF];
            double   globalbestIntensity;
            ObjectInstanceSelection globalBest = null;


            //generating the initial locations of n fireflies
            List <ObjectInstanceSelection> fireflies = init_ffa(nFF, subsetSize, nI, prob);

            ObjectInstanceSelection[] fireflyBackup = new ObjectInstanceSelection[fireflies.Count];
            ObjectInstanceSelection[] fireflyBest   = new ObjectInstanceSelection[fireflies.Count];
            List <int> changedIndex     = new List <int>(); //changedIndex keeps track of the indices of fireflies that have been moved
            double     newBestIntensity = new double();
            int        maxIndex;
            bool       stopSearch = false; //stopSearch will be set to true when a firefly with classification accuracy of 100% is found

            globalbestIntensity = double.MinValue;

            //Iterations or pseudo time marching
            for (int i = 0; i < MaxGeneration; i++)
            {
                //Evaluate objective function
                fitnessVal = this.EvaluateObjectiveFunction(fireflies, prob); //evaluate objective function for each firefly

                //stop searching if a firefly yields 100% prediction accuracy
                for (int t = 0; t < fitnessVal.Count(); t++)
                {
                    //double predAccr = avgAcc[changedIndex[t]] * 100;
                    double predAccr = fitnessVal[t] * 100;
                    if (predAccr == 100) //if prediction accuracy is equal to 100, stop searching and select the firefly that gives this accuracy
                    {
                        globalBest = fireflies[t]; //fitnessVal is indexed per firefly, so fireflies[t] is the firefly that yields this accuracy (changedIndex is empty before the first move)
                        stopSearch = true;
                        break;
                    }
                }

                //stop the outer loop once such a firefly has been found
                if (stopSearch == true)
                {
                    break;
                }

                //fitnessVal = this.EvaluateObjectiveFunction(fireflies, avgAcc, prob); //evaluate objective function for each firefly
                newBestIntensity = fitnessVal.Max(); //get the firefly with the highest light intensity
                if (newBestIntensity > globalbestIntensity)
                {
                    globalbestIntensity = newBestIntensity;
                    maxIndex            = Array.IndexOf(fitnessVal, newBestIntensity); //select the index for the global best
                    globalBest          = fireflies[maxIndex];                         //select the global best firefly
                    //bestC = (double)fireflies[maxIndex].cValue; //save the C value for the global best
                    //bestGamma = (double)fireflies[maxIndex].GValue; //save the Gamma for the global best
                }

                fireflies.CopyTo(fireflyBackup); fitnessVal.CopyTo(Lighto, 0); fitnessVal.CopyTo(Lightn, 0); //creating duplicates
                //Lightn.CopyTo(Lighto, 0);

                changedIndex.Clear();
                ffa_move(Lightn, fireflyBackup, Lighto, alpha, gamma, fireflies, prob);

                fireflies.CopyTo(fireflyBackup); //backing up the current positions of the fireflies
                Lightn.CopyTo(Lighto, 0);        //backing up the current intensities of the fireflies
            }

            //ensure that at least Min instances are selected for classification
            int countSelected = globalBest.__Attribute_Values.Count(q => q == 1); //count the total number of selected instances
            int diff, c = 0, d = 0;
            int Min = 15;                                                         //minimum number of selected instances

            if (countSelected < Min)
            {
                diff = Min - countSelected;
                //if fewer than Min instances are selected, flip unselected positions until the shortfall is covered
                while (c < diff)
                {
                    if (globalBest.__Attribute_Values[d] == 1) //skip positions that are already selected
                    {
                        d++;
                        continue;
                    }
                    else //select instances at positions where the instance mask is 0
                    {
                        globalBest.__Attribute_Values[d] = 1;
                        c++; d++;
                    }
                }
            }

            Problem subBest = buildModelMultiClass(globalBest, prob);      //build model for the best instance mask

            storagePercentage = Training.StoragePercentage(subBest, prob); //calculate the percentage of the original training set retained by the reduction algorithm

            return(subBest);
        }
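Training.StoragePercentage is referenced above but not defined in this listing. Assuming the Problem type exposes a Count property holding the number of instances (as its usage elsewhere in these examples suggests), a plausible sketch of the retention percentage it reports is:

        //percentage of the original training set retained by the reduced subproblem (sketch, assuming Problem.Count is the instance count)
        public static double StoragePercentage(Problem subProblem, Problem originalProblem)
        {
            return ((double)subProblem.Count / originalProblem.Count) * 100; //cast to double to avoid integer division
        }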
        //build model for binary problems
        public Problem buildModel(ObjectInstanceSelection firefly, Problem prob)
        {
            int           tNI = firefly.Attribute_Values.Count(); //size of each Instance Mask
            List <double> y = new List <double>();
            List <Node[]> x = new List <Node[]>();
            bool          pos = false, neg = false;

            //build the subproblem from every instance whose bit is set in the firefly's instance mask
            for (int j = 0; j < tNI; j++)
            {
                if (firefly.__Attribute_Values[j] == 1) //if instance is selected, use for classification
                {
                    int p = firefly.__Pointers[j];
                    x.Add(prob.X[p]);
                    y.Add(prob.Y[p]);

                    if (prob.Y[p] == 1)
                    {
                        pos = true;
                    }
                    else if (prob.Y[p] == -1)
                    {
                        neg = true;
                    }
                }
                else
                {
                    continue;
                }
            }

            Node[][] X = new Node[x.Count][];
            double[] Y = new double[y.Count];

            //ensuring that the subproblem consists of both positive and negative instances
            int k      = 0;
            int countP = y.Count(r => r == 1);  //counting the total number of positive instances in the subproblem
            int countN = y.Count(r => r == -1); //counting the total number of negative instances in the subproblem

            if (pos == false && neg == false)   //if no instance (positive and negative) was selected, return null. Don't perform any computation
            {
                return(null);
            }
            else if (pos == false || countP <= 1)    //if pos == false, then no positive instance is in the subproblem
            {
                for (int i = 0; i < prob.Count; i++) //if no positive instance, search the whole problem and insert two positive instances in the first and second positions of the subproblem
                {
                    if (prob.Y[i] == 1)
                    {
                        x[k] = prob.X[i]; //insert positive instance in the first and second position
                        y[k] = prob.Y[i]; //insert label
                        k++;
                    }
                    if (k == 2)
                    {
                        break;
                    }
                }
            }
            else if (neg == false || countN <= 1) //if neg == false, then no negative instance is in the subproblem
            {
                k = 0;
                for (int i = 0; i < prob.Count; i++) //if no negative instance, search the whole subproblem and insert two negative instances in the first and second position of subproblem
                {
                    if (prob.Y[i] == -1)
                    {
                        x[k] = prob.X[i]; //insert negative instance in the first and second position
                        y[k] = prob.Y[i]; //insert label
                        k++;
                    }
                    if (k == 2)
                    {
                        break;
                    }
                }
            }

            x.CopyTo(X); //convert from lists to arrays
            y.CopyTo(Y);
            Problem subProb = new Problem(X.Count(), Y, X, X[0].GetLength(0));

            return(subProb);
        }
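LevyFlight, which drives the global-pollination step in the examples above, is likewise missing from this listing. The sketch below uses Mantegna's algorithm with beta = 1.5, a common choice in flower pollination implementations, together with a Box-Muller Gaussian helper; the constants and the Gaussian helper name are assumptions rather than the author's code:

        //draw a Levy-distributed step for each dimension via Mantegna's algorithm (sketch, beta fixed at 1.5)
        public double[] LevyFlight(int dimension)
        {
            double beta  = 1.5;
            double sigma = 0.6966; //Mantegna scaling factor precomputed for beta = 1.5
            Random rnd   = new Random();
            double[] step = new double[dimension];

            for (int k = 0; k < dimension; k++)
            {
                double u = Gaussian(rnd) * sigma;                //numerator ~ N(0, sigma^2)
                double v = Gaussian(rnd);                        //denominator ~ N(0, 1)
                step[k] = u / Math.Pow(Math.Abs(v), 1.0 / beta); //Levy step
            }
            return step;
        }

        //standard normal sample via the Box-Muller transform
        private static double Gaussian(Random rnd)
        {
            double u1 = 1.0 - rnd.NextDouble(); //avoid Log(0)
            double u2 = rnd.NextDouble();
            return Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Cos(2.0 * Math.PI * u2);
        }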