// ===============================================
static int betterThan(Fitness f1, Fitness f2)
{
    int n, n1, n2;

    if (f1.size == 1) goto fitness; // No constraints (except the search space)

    // Criterion "Number of respected constraints"
    n1 = 0;
    n2 = 0;
    for (n = 1; n < f1.size; n++)
    {
        if (f1.f[n] < 0) n1 = n1 + 1;
        if (f2.f[n] < 0) n2 = n2 + 1;
    }
    if (n1 > n2) return 1;
    if (n1 < n2) return 0;
    // Here, n1 = n2

    fitness: // Criterion "Total fitness"
    if (f1.errorFC() < f2.errorFC() - Constants.Zero) return 1;
    return 0;
}
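// Illustrative sketch (not part of the original code): shows the two-stage comparison
// performed by betterThan(): first the number of satisfied constraints (f[1..size-1] < 0),
// then the total fitness errorFC() as a tie-break. The numeric values are made up.
static void BetterThanExample()
{
    Fitness f1 = new Fitness(Constants.DMax) { size = 3 };
    f1.f[0] = 1.2; f1.f[1] = -0.1; f1.f[2] = 0.5;  // one constraint satisfied

    Fitness f2 = new Fitness(Constants.DMax) { size = 3 };
    f2.f[0] = 0.8; f2.f[1] = -0.2; f2.f[2] = -0.3; // two constraints satisfied

    // f2 satisfies more constraints, so betterThan(f1, f2) returns 0;
    // errorFC() would only be consulted if both satisfied the same number.
    Console.WriteLine(betterThan(f1, f2));
}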
public static Fitness Constraint(Position x, int functCode, double epsConstr)
{
    // ff[0] is defined in perf()

    // Variables specific to the coil compression spring problem
    const double fmax = 1000.0;
    const double fp = 300;
    double Cf;
    double K;
    double sp;
    double lf;
    const double S = 189000.0;
    const double lmax = 14.0;
    const double spm = 6.0;
    const double sw = 1.25;
    const double G = 11500000;

    Fitness ff = new Fitness(Constants.DMax) { size = 1 };

    switch (functCode)
    {
        case 7:
            ff.size = 4;
            ff.f[1] = 0.0193 * x.x[2] - x.x[0];
            ff.f[2] = 0.00954 * x.x[2] - x.x[1];
            ff.f[3] = 750 * 1728 - Math.PI * x.x[2] * x.x[2] * (x.x[3] + (4.0 / 3) * x.x[2]);
            break;

        case 8:
            ff.size = 5;
            Cf = 1 + 0.75 * x.x[2] / (x.x[1] - x.x[2]) + 0.615 * x.x[2] / x.x[1];
            K = 0.125 * G * Math.Pow(x.x[2], 4) / (x.x[0] * x.x[1] * x.x[1] * x.x[1]);
            sp = fp / K;
            lf = fmax / K + 1.05 * (x.x[0] + 2) * x.x[2];
            ff.f[1] = 8 * Cf * fmax * x.x[1] / (Math.PI * x.x[2] * x.x[2] * x.x[2]) - S;
            ff.f[2] = lf - lmax;
            ff.f[3] = sp - spm;
            ff.f[4] = sw - (fmax - fp) / K;
            break;

        case 15:
            ff.size = 4;
            ff.f[1] = Math.Abs(x.x[0] * x.x[0] + x.x[1] * x.x[1] + x.x[2] * x.x[2]
                      + x.x[3] * x.x[3] + x.x[4] * x.x[4] - 10) - epsConstr; // Constraint h1 <= eps
            ff.f[2] = Math.Abs(x.x[1] * x.x[2] - 5 * x.x[3] * x.x[4]) - epsConstr; // Constraint h2 <= eps
            ff.f[3] = Math.Abs(Math.Pow(x.x[0], 3) + Math.Pow(x.x[1], 3) + 1) - epsConstr; // Constraint h3 <= eps
            break;
    }
    return ff;
}
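// Illustrative sketch (not part of the original code): builds a Position by hand and
// evaluates the case 7 constraints defined above, then reads ff.f[1..] back.
// The coordinate values are arbitrary and chosen only for the example.
static void ConstraintExample()
{
    Position x = new Position(Constants.DMax);
    x.x[0] = 1.0; x.x[1] = 0.6; x.x[2] = 50.0; x.x[3] = 100.0;

    Fitness ff = Constraint(x, 7, 0.0); // epsConstr is only used by case 15
    for (int n = 1; n < ff.size; n++)
        Console.WriteLine("g{0} = {1} (satisfied if < 0)", n, ff.f[n]);
}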
// ===========================================================
static Result PSO(Parameters parameters, IProblem pb, int level)
{
    int added; // For information
    Fitness error = new Fitness(Constants.fMax);
    Fitness errorInit = new Fitness(Constants.fMax); // Just for information
    Fitness errorPrev = new Fitness(Constants.fMax);
    double errorTot;
    int g;
    int sBest; // Rank in g of the best of the bests
    int improvTot; // Number of particles that have improved their previous best
    int[] index = new int[Constants.SMax];
    int initLinks; // Flag to (re)init or not the information links
    int initLinkNb;
    int iterBegin;
    int[,] LINKS = new int[Constants.SMax, Constants.SMax]; // Information links
    int m;
    int moved;
    int n;
    int noStop;
    Result R = new Result();
    int removed; // For information
    int s0 = 0;
    int s, s1, s2;
    int spread;
    int stagnation = 0;
    int swarmMod;
    int sWorst;
    XV xvNorm = new XV();

    // -----------------------------------------------------
    // INITIALISATION
    R.SW.S = parameters.S; // Initial size of the swarm
    memPos.Rank = 0; // Rank (in M) where to memorise a new position
    memPos.Size = 0; // Number of memorised positions

    // Positions
    for (s = 0; s < R.SW.S; s++)
    {
        R.SW.X[s] = Position.Initialize(pb.SwarmSize);
        memPos.memSave(R.SW.X[s]); // Save the position
    }

    // Velocities
    for (s = 0; s < R.SW.S; s++)
    {
        R.SW.V[s] = Velocity.Initialize(R.SW.X[s], pb.SwarmSize);
    }

    // Discrete values
    // Note: may be removed if you are sure that the initialisation
    // takes discretisation into account (or if there is none)
    for (s = 0; s < R.SW.S; s++)
    {
        R.SW.X[s] = Position.Discrete(R.SW.X[s], pb);
    }

    // Note: at this point no confinement is needed:
    // the initialisation is supposed to be OK from this point of view
    // (but some constraints may not be respected)

    // First evaluations
    for (s = 0; s < R.SW.S; s++)
    {
        R.SW.X[s].f = pb.Evaluate(R.SW.X[s]);
        R.SW.P[s] = R.SW.X[s]; // Best position = current one
    }

    // Save the positions
    for (s = 0; s < R.SW.S; s++) memPos.memSave(R.SW.X[s]);

    // Find the best
    R.SW.best = best(R.SW);
    error = R.SW.P[R.SW.best].f;

    // Display the best
    Console.WriteLine("Best value after init. {0} ", R.SW.P[R.SW.best].f.f[0]);
    if (pb.Constraint > 0)
    {
        Console.WriteLine("Constraints (should be < 0) ");
        for (n = 0; n < pb.Constraint; n++) Console.WriteLine("{0} ", error.f[n + 1]);
    }
    //fprintf(f_run,"\n Best value after init. %f ", errorPrev );
    //printf( "\n Position :\n" );
    //for ( d = 0; d < pb.SwarmSize.D; d++ ) printf( " %f", R.SW.P[R.SW.best].x[d] );

    initLinks = 1; // So that the information links will be initialised
    initLinkNb = 0; // Count the number of iterations between two re-inits of the links
    iter = 0;
    nEval = 0;
    noStop = 0;
    added = 0;
    removed = 0; // For information
    spread = spreadIter(parameters.spreadProba, R.SW.S, parameters.formula); // Number of iterations
                                                                             // needed to "spread" the information
    errorInit = error; // For information

    // ---------------------------------------------- ITERATIONS
    while (noStop == 0)
    {
        //printf("\niter %i",iter);
        //fprintf(f_run,"\niter %i",iter);
        iter = iter + 1;
        errorPrev = error;

        Alea.Shuffle(index, R.SW.S); // Random numbering of the particles

        if (initLinks == 1) // Bidirectional ring topology, randomly built
        {
            initLinks = 0;
            initLinkNb = 0; // Count the number of iterations since the last re-init of the links

            // Init to zero (no link)
            for (s = 0; s < R.SW.S; s++)
            {
                for (m = 0; m < R.SW.S; m++) LINKS[m, s] = 0;
            }

            // Information links (bidirectional ring)
            for (s = 0; s < R.SW.S - 1; s++)
            {
                LINKS[index[s], index[s + 1]] = 1;
                LINKS[index[s + 1], index[s]] = 1;
            }
            LINKS[index[0], index[R.SW.S - 1]] = 1;
            LINKS[index[R.SW.S - 1], index[0]] = 1;

            // Each particle informs itself
            for (m = 0; m < R.SW.S; m++) LINKS[m, m] = 1;
        }

        // Loop on particles, for move
        improvTot = 0;
        for (s0 = 0; s0 < R.SW.S; s0++)
        {
            s = index[s0];

            // Find the best informant
            g = s;
            for (m = 0; m < R.SW.S; m++)
            {
                if (m == s) continue;
                if (LINKS[m, s] == 1 && betterThan(R.SW.P[m].f, R.SW.P[g].f) == 1) g = m;
            }

            // Move
            xvNorm = move(R, s, g, pb, parameters);
            xvNorm.x = Position.Discrete(xvNorm.x, pb);

            // Confinement and evaluation
            xvNorm.Confinement(pb);

            // New position and new velocity
            R.SW.X[s] = xvNorm.x;
            R.SW.V[s] = xvNorm.v;

            // Update the best previous position
            if (betterThan(R.SW.X[s].f, R.SW.P[s].f) == 1) // Improvement of the previous best
            {
                R.SW.P[s] = R.SW.X[s].Clone();
                improvTot++; // Increase the number of improvements during this iteration

                // Memorise the improved position
                memPos.memSave(R.SW.P[s]);

                // Update the best of the bests
                if (betterThan(R.SW.P[s].f, R.SW.P[R.SW.best].f) == 1) R.SW.best = s;
            }

            // Decide to stop or not
            errorTot = R.SW.P[R.SW.best].f.errorFC();
            if (errorTot > pb.Epsilon && nEval < pb.EvaluationMaximum)
                noStop = 0; // Won't stop
            else // Stop: success (error small enough) or evaluation budget exhausted
            {
                noStop = 1; // Will stop
                goto end;
            }
        } // End of "for (s0 = 0 ..." = end of the iteration (move)

        /*--------------------------------------------------
         Adaptations

         Rule 1: Check every "spread" iterations after each re-init of the links.
           If no improvement of the global best => re-init the links before the next iteration.

         Rule 2: If no improvement of the global best during "spread" iterations
           => try to add a particle (and initialise it in a non-searched area)
           => re-init the links before the next iteration.
           Note that the condition is slightly different from the one of Rule 1.

         Rule 3: If "enough" local improvements during the iteration
           => try to remove a particle (keep at least D+1 of them).
        */

        // Rule 1 - Re-initialising the information links
        // Check whether there has been an improvement since the last re-init of the links
        initLinkNb = initLinkNb + 1; // Number of iterations since the last check
        if (initLinkNb >= spread) // It's time to check
        {
            initLinkNb = 0; // Reset to zero the number of iterations since the last check

            // The swarm size may have been modified, so "spread" must be recomputed
            spread = spreadIter(parameters.spreadProba, R.SW.S, parameters.formula);

            if (betterThan(error, errorPrev) == 1) // Improvement
                initLinks = 0; // No need of structural adaptation
            else // No improvement
                initLinks = 1; // Information links will be reinitialised
        }
        else initLinks = 0; // Too early, no need to check

        sWorst = worst(R.SW); // Rank of the worst particle, before any adaptation

        // Rule 2 - Adding a particle
        // Check global stagnation (improvement of the global best)
        if (betterThan(R.SW.P[R.SW.best].f, errorPrev) == 1)
            stagnation = 0; // Improvement
        else
            stagnation++; // No improvement during this iteration

        swarmMod = 0; // Information flag
        if (stagnation >= spread) // Too many iterations without global improvement
                                  // => add a particle
        {
            if (R.SW.S < Constants.SMax) // if not too many particles
            {
                s = R.SW.S;
                R.SW.X[s] = memPos.InitializeFar(pb); // Init in a non-searched area
                R.SW.X[s] = Position.Discrete(R.SW.X[s], pb); // If discrete search space
                R.SW.X[s].f = pb.Evaluate(R.SW.X[s]); // Evaluation
                R.SW.V[s] = Velocity.Initialize(R.SW.X[s], pb.SwarmSize); // Init velocity
                R.SW.P[s] = R.SW.X[s].Clone(); // Previous best = current position
                R.SW.S = R.SW.S + 1; // Increase the swarm size
                //fprintf(f_swarm,"%i %i %f\n",iter, R.SW.S,error.f[0]);

                // Count the number of added particles (for information)
                added++;
                initLinks = 1; // Links will be reinitialised
                stagnation = 0; // Reset the stagnation counter
                swarmMod = 1; // A particle has been added
                //printf("\n iter %i, added %i, S %i, spread %i",iter, added, R.SW.S, spread);
            }
        }

        // Rule 3 - Removing a particle
        // If enough particles have improved, remove the worst one
        // (but keep at least D+1 particles)
        // NOTE: this is "the worst" without taking into account the particle
        //       that has (possibly) just been added
        // NOTE: it is perfectly possible to have a particle added
        //       (because of no improvement of the global best) AND
        //       a particle removed (because of enough _local_ improvements)
        if (R.SW.S > pb.SwarmSize.D + 1 && improvTot > 0.5 * R.SW.S)
        {
            if ((swarmMod == 0 && sWorst < R.SW.S - 1) || swarmMod == 1) // if the worst is not the last
            {
                R.SW.P[sWorst] = R.SW.P[R.SW.S - 1]; // ... replace it by the last one
                R.SW.V[sWorst] = R.SW.V[R.SW.S - 1];
                R.SW.X[sWorst] = R.SW.X[R.SW.S - 1];

                // Compact the matrix of the links
                for (s1 = 0; s1 < R.SW.S; s1++) // For each line, compact the columns
                    for (s2 = sWorst; s2 < R.SW.S - 1; s2++) LINKS[s1, s2] = LINKS[s1, s2 + 1];

                for (s2 = 0; s2 < R.SW.S - 1; s2++) // For each column, compact the lines
                    for (s1 = sWorst; s1 < R.SW.S - 1; s1++) LINKS[s1, s2] = LINKS[s1 + 1, s2];
            }

            R.SW.S = R.SW.S - 1; // Decrease the swarm size
            if (s < R.SW.best) R.SW.best = R.SW.best - 1; // The rank of the best may
                                                          // have been modified

            // Count the number of removed particles (for information)
            removed++;
            swarmMod = -1; // A particle has been removed
            //printf("\n iter %i, removed %i, S %i, spread %i",iter, removed, R.SW.S, spread);
        }

        // Save the result of the iteration to a file
        if (swarmMod != 0)
        {
            //fprintf(f_swarm,"%i %i %f\n",iter, R.SW.S,R.SW.P[R.SW.best].f.f[0]);
        }

        // End of the iteration
        end: ;
    } // End of "while (noStop == 0)"

    // Convergence rate, just for information
    R.convRate = (errorInit.f[0] - R.SW.P[R.SW.best].f.f[0]) / errorInit.f[0];

    // Information about the evolution of the swarm size
    Console.WriteLine("{0} iterations, +{1} -{2} particles", iter, added, removed);

    // Final number of evaluations
    R.nEval = nEval;

    // Final fitness
    R.error = R.SW.P[R.SW.best].f;

    return R;
}
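// Illustrative sketch (not part of the original code): one way a caller might run PSO()
// and read back the Result fields set above. It assumes a Parameters instance and an
// IProblem implementation have been built elsewhere; the meaning of "level" is not shown
// in this listing, so passing 0 here is an assumption.
static void RunOnce(Parameters parameters, IProblem pb)
{
    Result r = PSO(parameters, pb, 0);
    Console.WriteLine("Evaluations: {0}", r.nEval);
    Console.WriteLine("Best fitness: {0}", r.error.f[0]);
    Console.WriteLine("Convergence rate: {0}", r.convRate);
    Console.WriteLine("Final swarm size: {0}", r.SW.S);
}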
public double[] x; // Coordinates

#endregion Fields

#region Constructors

public Position(int dMax)
{
    x = new double[dMax];
    f = new Fitness(dMax);
    size = 0;
}