public int CompareTo(object obj)
        {
            CMAESCandidate other = (CMAESCandidate)obj;

            return(this.ObjFunctionValue.CompareTo(other.ObjFunctionValue)); //sorts in ascending order by objective function value; the CMA-ES loops reverse the sorted list so index 0 ends up holding the best candidate.
                                                                             // returns 0 if equal, -1 if less, and 1 if greater.
        }
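
Because CompareTo delegates directly to ObjFunctionValue.CompareTo, List<T>.Sort() orders candidates ascending by objective value, and the CMA-ES loops later in this file call Reverse() so index 0 ends up holding the best candidate. A minimal, self-contained sketch of that ordering, using a hypothetical ScoredCandidate type rather than the real CMAESCandidate:

using System;
using System.Collections.Generic;

class ScoredCandidate : IComparable
{
    public double ObjFunctionValue { get; }
    public ScoredCandidate(double v) { ObjFunctionValue = v; }

    public int CompareTo(object obj)
    {
        ScoredCandidate other = (ScoredCandidate)obj;
        return this.ObjFunctionValue.CompareTo(other.ObjFunctionValue); //ascending by objective value
    }
}

class SortDemo
{
    static void Main()
    {
        List<ScoredCandidate> list = new List<ScoredCandidate>
        {
            new ScoredCandidate(0.62), new ScoredCandidate(0.91), new ScoredCandidate(0.48)
        };
        list.Sort();    //ascending: index 0 now holds 0.48
        list.Reverse(); //descending: index 0 now holds 0.91, matching how the CMA-ES loops take candidateArray[0] as the best
        Console.WriteLine(list[0].ObjFunctionValue); //prints 0.91
    }
}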
Example #2
 //Captures one core's CMA-ES state (mean, covariance factors, step size, evolution paths, strategy constants, and the best candidate so far) so a run can be checkpointed and later resumed.
 public VariantData(int coreNumIN, int currentEvalIN, int dimensionNumberIN, int lambdaValIN, Matrix <double> xMeanIN, Matrix <double> dMatrixIN, Matrix <double> bMatrixIN, double sigmaIN, Matrix <double> weightsIN, Matrix <double> psIN, double csIN, double mueffIN, double ccIN, Matrix <double> invSqrtCIN, Matrix <double> matrixCIN, double muIN, Matrix <double> pcIN, double c1IN, double cmuIN, double dampsIN, double chiNIN, double eigenValIN, CMAESCandidate globalBestIN)
 {
     coreNumber      = coreNumIN;
     currentEval     = currentEvalIN;
     dimensionNumber = dimensionNumberIN;
     lambdaVal       = lambdaValIN;
     xMean           = xMeanIN;
     dMatrix         = dMatrixIN;
     bMatrix         = bMatrixIN;
     weights         = weightsIN;
     ps              = psIN;
     cs              = csIN;
     mueff           = mueffIN;
     cc              = ccIN;
     pc              = pcIN;
     invSqrtC        = invSqrtCIN;
     C               = matrixCIN;
     mu              = muIN;
     c1              = c1IN;
     cmu             = cmuIN;
     damps           = dampsIN;
     chiN            = chiNIN;
     eigenVal        = eigenValIN;
     globalBest      = globalBestIN;
     sigma           = sigmaIN;
 }
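
The constructor above simply captures one core's CMA-ES state so it can be written to disk; RecoveredCMAES (Example #7 below) serializes a VariantData instance with BinaryFormatter after each generation. A minimal, self-contained sketch of that save/load round trip, using a hypothetical CheckpointState class in place of the real VariantData (note that BinaryFormatter is considered obsolete and insecure in recent .NET releases):

using System;
using System.IO;
using System.Runtime.Serialization.Formatters.Binary;

[Serializable]
class CheckpointState   //hypothetical stand-in for VariantData
{
    public int coreNumber;
    public int currentEval;
    public double sigma;
}

class CheckpointDemo
{
    static void Main()
    {
        CheckpointState state = new CheckpointState { coreNumber = 0, currentEval = 22, sigma = 0.5 };
        string fname = "VariantDataDemo.bin";

        //save (mirrors the bf.Serialize(str, varDataUpdated) call in RecoveredCMAES)
        using (Stream str = new FileInfo(fname).Open(FileMode.OpenOrCreate, FileAccess.Write))
        {
            new BinaryFormatter().Serialize(str, state);
        }

        //load
        using (Stream str = File.OpenRead(fname))
        {
            CheckpointState recovered = (CheckpointState)new BinaryFormatter().Deserialize(str);
            Console.WriteLine(recovered.currentEval); //prints 22
        }
    }
}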
Example #3
        //Attack different XOR APUFs (same type) in parallel
        //public static void RepeatAttackOnePUFType()
        //{
        //    int attackNumber = 40;
        //    int bitNumber = 128;
        //    int xorNumber = 4;
        //    double[] currentAccuracies = ClassicalAttackXORAPUFMulti(bitNumber, xorNumber, AppConstants.CoreNumber);
        //}

        //Runs the attack multiple times, each time on a DIFFERENT XOR APUF
        public static double[] ClassicalAttackXORAPUFMulti(int bitNumber, int numXOR, int attackRepeatNumber)
        {
            //Generate a PUF
            double aPUFMean      = 0.0;
            double aPUFVar       = 1.0;
            double aPUFMeanNoise = 0.0;
            double aPUFNoiseVar  = 0.0;

            //Create the XOR APUF for parallel runs
            XORArbiterPUF xPUF = new XORArbiterPUF(numXOR, bitNumber, aPUFMean, aPUFVar, aPUFMeanNoise, aPUFNoiseVar);

            XORArbiterPUF[] xArray = new XORArbiterPUF[attackRepeatNumber];
            for (int i = 0; i < xArray.Length; i++)
            {
                xArray[i] = new XORArbiterPUF(numXOR, bitNumber, aPUFMean, aPUFVar, aPUFMeanNoise, aPUFNoiseVar);
            }

            sbyte[][] trainingData         = new sbyte[AppConstants.TrainingSize][]; //these will be phi vectors
            sbyte[][] allTrainingResponses = new sbyte[attackRepeatNumber][];        //first index PUF, second index sample

            for (int i = 0; i < attackRepeatNumber; i++)
            {
                allTrainingResponses[i] = new sbyte[AppConstants.TrainingSize];
            }
            Random[] rGenArray = new Random[AppConstants.CoreNumber];
            for (int i = 0; i < AppConstants.CoreNumber; i++)
            {
                rGenArray[i] = new Random((int)DateTime.Now.Ticks);
                System.Threading.Thread.Sleep(10); //prevent the random number generators from being the same
            }
            DataGeneration.GenerateTrainingDataParallel(xArray, trainingData, allTrainingResponses, rGenArray);
            Console.Out.WriteLine("Data Generation Complete.");

            //create the objective function for parallel runs
            ObjectiveFunctionResponseXOR[] rObjArray = new ObjectiveFunctionResponseXOR[attackRepeatNumber];
            for (int i = 0; i < rObjArray.Length; i++)
            {
                rObjArray[i] = new ObjectiveFunctionResponseXOR();
            }
            double[][] solutionList = new double[attackRepeatNumber][];

            Random[] randomGeneratorArray = new Random[attackRepeatNumber];
            for (int r = 0; r < attackRepeatNumber; r++)
            {
                randomGeneratorArray[r] = new Random((int)DateTime.Now.Ticks);
                System.Threading.Thread.Sleep(10); //prevent the random number generators from being the same
            }

            //time to save the invariant data
            //if (AppConstants.IsLargeData == false)
            //{
            InvariantData invD      = new InvariantData(trainingData, allTrainingResponses, xArray);
            string        dayString = System.DateTime.Today.ToString();

            dayString = dayString.Replace(@"/", "-");
            dayString = dayString.Replace(" ", string.Empty);
            dayString = dayString.Replace(":", string.Empty);
            string          invariantDataFileName = "InvariantData" + dayString;
            string          fName = AppConstants.SaveDir + invariantDataFileName;
            FileInfo        fi    = new FileInfo(fName);
            Stream          str   = fi.Open(FileMode.OpenOrCreate, FileAccess.Write);
            BinaryFormatter bf    = new BinaryFormatter();

            invD.Serialize(bf, str);
            str.Close();

            var watch = System.Diagnostics.Stopwatch.StartNew();

            Parallel.For(0, attackRepeatNumber, a =>
            {
                Random randomGenerator   = randomGeneratorArray[a];                 //remove the dependencies for parallelization
                int dimensionNumber      = (bitNumber + 1) * xArray[a].GetPUFNum(); //the weights of all the XOR APUFs
                sbyte[] trainingResponse = allTrainingResponses[a];
                //Generate the first solution randomly for CMA-ES
                double[] firstSolution = new double[dimensionNumber];
                for (int i = 0; i < firstSolution.Length; i++)
                {
                    firstSolution[i] = randomGenerator.NextDouble();
                }
                Console.Out.WriteLine("Beginning CMA-ES run # " + a.ToString());
                //CMAESCandidate solutionCMAES = CMAESMethods.ComputeCMAES(dimensionNumber, rObjArray[a], trainingData, trainingResponse, firstSolution, randomGenerator);
                CMAESCandidate solutionCMAES = CMAESMethods.ComputeCMAESRecoverable(dimensionNumber, rObjArray[a], trainingData, trainingResponse, firstSolution, randomGenerator, a);
                double solutionVal           = solutionCMAES.GetObjectiveFunctionValue();
                solutionList[a] = solutionCMAES.GetWeightVector(); //store the solution in independent memory
                Console.Out.WriteLine("CMA-ES on core " + a.ToString() + " finished.");
            });
            watch.Stop();
            Console.Out.WriteLine("Elapsed Time is " + watch.ElapsedMilliseconds.ToString());

            //measure the accuracy
            Random randomGenerator2 = new Random((int)DateTime.Now.Ticks);
            double averageAccuracy  = 0;

            double[] solutionAccuracies = new double[attackRepeatNumber];
            for (int a = 0; a < solutionList.Length; a++)
            {
                sbyte[][] testingData     = new sbyte[AppConstants.TestingSize][]; //these will be phi vectors
                sbyte[]   testingResponse = new sbyte[AppConstants.TestingSize];
                DataGeneration.GenerateTrainingData(xArray[a], testingData, testingResponse, randomGenerator2);
                double accMeasures = rObjArray[0].ObjFunValue(solutionList[a], testingData, testingResponse);
                solutionAccuracies[a] = accMeasures;
                averageAccuracy       = averageAccuracy + accMeasures;
            }
            averageAccuracy = averageAccuracy / (double)attackRepeatNumber;
            Console.Out.WriteLine("The average accuracy for the XOR APUF is " + averageAccuracy.ToString());
            return(solutionAccuracies);
        }
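
The Random generators above are seeded from DateTime.Now.Ticks with a 10 ms sleep between constructions so that no two generators receive the same seed. A common alternative, shown below only as a sketch and not as what this code does, is to draw every seed from a single master Random, which makes the sleep unnecessary:

using System;

class SeedingSketch
{
    //Returns independent Random instances without sleeping between constructions.
    static Random[] MakeGenerators(int count)
    {
        Random master = new Random();            //one time-based seed for the master generator
        Random[] gens = new Random[count];
        for (int i = 0; i < count; i++)
        {
            gens[i] = new Random(master.Next()); //distinct seeds drawn from the master generator
        }
        return gens;
    }

    static void Main()
    {
        Random[] gens = MakeGenerators(4);
        foreach (Random r in gens)
        {
            Console.WriteLine(r.NextDouble()); //values differ across generators
        }
    }
}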
Example #4
        //Runs the attack on one PUF model; the cores are used to evaluate the CRPs of that one model in parallel, so the method runs as fast as possible
        public static double ClassicalAttackXORAPUFSingle(int bitNumber, int numXOR)
        {
            //Generate a PUF
            double aPUFMean      = 0.0;
            double aPUFVar       = 1.0;
            double aPUFMeanNoise = 0.0;
            double aPUFNoiseVar  = 0.0;

            //Create the XOR APUF
            XORArbiterPUF xPUF = new XORArbiterPUF(numXOR, bitNumber, aPUFMean, aPUFVar, aPUFMeanNoise, aPUFNoiseVar);

            //Arrays for storing the training data
            sbyte[][] trainingData         = new sbyte[AppConstants.TrainingSize][]; //these will be phi vectors
            sbyte[][] allTrainingResponses = new sbyte[1][];                         //first index PUF, second index sample
            allTrainingResponses[0] = new sbyte[AppConstants.TrainingSize];

            Random[] rGenArray = new Random[AppConstants.CoreNumber];
            for (int i = 0; i < AppConstants.CoreNumber; i++)
            {
                rGenArray[i] = new Random((int)DateTime.Now.Ticks);
                System.Threading.Thread.Sleep(10); //prevent the random number generators from being the same
            }
            DataGeneration.GenerateTrainingDataParallel(xPUF, trainingData, allTrainingResponses, rGenArray);
            Console.Out.WriteLine("Data Generation Complete.");

            //create the objective function for parallel runs
            ObjectiveFunctionResponseXOR rObj = new ObjectiveFunctionResponseXOR();

            //Start the attack run
            var    watch           = System.Diagnostics.Stopwatch.StartNew();
            Random randomGenerator = new Random((int)DateTime.Now.Ticks);
            int    dimensionNumber = (bitNumber + 1) * xPUF.GetPUFNum(); //the weights of all the XOR APUFs

            sbyte[] trainingResponse = allTrainingResponses[0];
            //Generate the first solution randomly for CMA-ES
            double[] firstSolution = new double[dimensionNumber];
            for (int i = 0; i < firstSolution.Length; i++)
            {
                firstSolution[i] = randomGenerator.NextDouble();
            }
            Console.Out.WriteLine("Beginning CMA-ES");
            //The next line uses maximum parallelism on a single run
            CMAESCandidate solutionCMAES = CMAESMethods.ComputeCMAES(dimensionNumber, rObj, trainingData, trainingResponse, firstSolution, randomGenerator);
            double         solutionVal   = solutionCMAES.GetObjectiveFunctionValue();

            double[] computedSolution = solutionCMAES.GetWeightVector(); //store the solution in independent memory
            Console.Out.WriteLine("CMA-ES finished.");
            watch.Stop();
            Console.Out.WriteLine("Elapsed Time is " + watch.ElapsedMilliseconds.ToString());

            //turn off parallelism
            AppConstants.UseParallelismOnSingleCMAES = false;

            //measure the accuracy
            Random randomGenerator2 = new Random((int)DateTime.Now.Ticks);

            sbyte[][] testingData     = new sbyte[AppConstants.TestingSize][]; //these will be phi vectors
            sbyte[]   testingResponse = new sbyte[AppConstants.TestingSize];
            DataGeneration.GenerateTrainingData(xPUF, testingData, testingResponse, randomGenerator2);
            double accMeasure = rObj.ObjFunValue(computedSolution, testingData, testingResponse);

            Console.Out.WriteLine("The accuracy for the XOR APUF is " + accMeasure.ToString());
            return(accMeasure);
        }
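
A typical way to invoke these attacks for a 128-bit, 4-XOR APUF (the values suggested by the commented-out RepeatAttackOnePUFType in Example #3) is sketched below; this is a usage fragment only, and the enclosing class name AttackMethods is an assumption, since these snippets do not show it:

//Hypothetical driver code; AttackMethods and the choice of AppConstants.CoreNumber for the
//repeat count are assumptions, not something these snippets confirm.
int bitNumber = 128;
int xorNumber = 4;

//single model, CRPs of that model evaluated in parallel (Example #4)
double singleAccuracy = AttackMethods.ClassicalAttackXORAPUFSingle(bitNumber, xorNumber);

//several independent models, one CMA-ES run per core (Example #3)
double[] accuracies = AttackMethods.ClassicalAttackXORAPUFMulti(bitNumber, xorNumber, AppConstants.CoreNumber);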
Example #5
        public static double[] ClassicalAttackXORAPUFMultiRecovered(int bitNumber, int attackRepeatNumber, InvariantData invData, VariantData[] variantDataArray)
        {
            //Create the XOR APUF for parallel runs
            XORArbiterPUF[] xArray = new XORArbiterPUF[attackRepeatNumber];
            for (int i = 0; i < xArray.Length; i++)
            {
                xArray[i] = (XORArbiterPUF)invData.GetPUFatIndex(i);
            }

            sbyte[][] trainingData         = invData.GetTrainingData();        //these will be phi vectors
            sbyte[][] allTrainingResponses = invData.GetTrainingResponseAll(); //first index PUF, second index sample

            //create the objective function for parallel runs
            ObjectiveFunctionResponseXOR[] rObjArray = new ObjectiveFunctionResponseXOR[attackRepeatNumber];
            for (int i = 0; i < rObjArray.Length; i++)
            {
                rObjArray[i] = new ObjectiveFunctionResponseXOR();
            }
            double[][] solutionList = new double[attackRepeatNumber][];

            Random[] randomGeneratorArray = new Random[attackRepeatNumber];
            for (int r = 0; r < attackRepeatNumber; r++)
            {
                randomGeneratorArray[r] = new Random((int)DateTime.Now.Ticks);
                System.Threading.Thread.Sleep(10); //prevent the random number generators from being the same
            }

            var watch = System.Diagnostics.Stopwatch.StartNew();

            Parallel.For(0, attackRepeatNumber, a =>
            {
                Random randomGenerator   = randomGeneratorArray[a];                 //remove the dependencies for parallelization
                int dimensionNumber      = (bitNumber + 1) * xArray[a].GetPUFNum(); //the weights of all the XOR APUFs
                sbyte[] trainingResponse = allTrainingResponses[a];
                //Generate the first solution randomly for CMA-ES
                double[] firstSolution = new double[dimensionNumber];
                for (int i = 0; i < firstSolution.Length; i++)
                {
                    firstSolution[i] = randomGenerator.NextDouble();
                }
                Console.Out.WriteLine("Beginning CMA-ES run # " + a.ToString());
                //CMAESCandidate solutionCMAES = CMAESMethods.ComputeCMAES(dimensionNumber, rObjArray[a], trainingData, trainingResponse, firstSolution, randomGenerator);
                CMAESCandidate solutionCMAES = CMAESMethods.RecoveredCMAES(randomGenerator, a, rObjArray[a], invData, variantDataArray[a]);
                double solutionVal           = solutionCMAES.GetObjectiveFunctionValue();
                solutionList[a] = solutionCMAES.GetWeightVector(); //store the solution in independent memory
                Console.Out.WriteLine("CMA-ES on core " + a.ToString() + " finished.");
                //Console.Out.WriteLine("Final training value is "+solutionVal.ToString());
                //}
            });
            watch.Stop();
            Console.Out.WriteLine("Elapsed Time is " + watch.ElapsedMilliseconds.ToString());

            //measure the accuracy
            Random randomGenerator2 = new Random((int)DateTime.Now.Ticks);
            double averageAccuracy  = 0;

            double[] solutionAccuracies = new double[attackRepeatNumber];
            for (int a = 0; a < solutionList.Length; a++)
            {
                sbyte[][] testingData     = new sbyte[AppConstants.TestingSize][]; //these will be phi vectors
                sbyte[]   testingResponse = new sbyte[AppConstants.TestingSize];
                DataGeneration.GenerateTrainingData(xArray[a], testingData, testingResponse, randomGenerator2);
                double accMeasures = rObjArray[0].ObjFunValue(solutionList[a], testingData, testingResponse);
                solutionAccuracies[a] = accMeasures;
                averageAccuracy       = averageAccuracy + accMeasures;
            }
            averageAccuracy = averageAccuracy / (double)attackRepeatNumber;
            Console.Out.WriteLine("The average accuracy for the XOR APUF is " + averageAccuracy.ToString());
            return(solutionAccuracies);
        }
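
Both multi-run attacks follow the same parallelization pattern: every per-run resource (random generator, objective function, solution slot) is allocated up front and indexed by a, so the Parallel.For body never touches shared mutable state. A minimal, self-contained sketch of that pattern (the names are illustrative, not from this project):

using System;
using System.Threading.Tasks;

class ParallelPattern
{
    static void Main()
    {
        int runs = 4;
        Random master = new Random();

        //pre-allocate one generator and one result slot per run, as the attack code does
        Random[] gens = new Random[runs];
        double[] results = new double[runs];
        for (int i = 0; i < runs; i++)
        {
            gens[i] = new Random(master.Next());
        }

        Parallel.For(0, runs, a =>
        {
            Random local = gens[a];          //each index owns its generator
            results[a] = local.NextDouble(); //each index writes only its own slot
        });

        Console.WriteLine(string.Join(", ", results));
    }
}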
Example #6
        //standard CMA-ES with maximum parallelization

        public static CMAESCandidate ComputeCMAESBecker(int dimensionNumber, PUFObjectiveFunction pFunction, sbyte[][] trainingData, sbyte[] targets, double[] initialWeightVector, Random randomGenerator)
        {
            int             n     = dimensionNumber + 1; //number of weights + epsilon
            Matrix <double> xMean = Matrix <double> .Build.Dense(n, 1);

            CMAESCandidate c11 = new CMAESCandidate(initialWeightVector, trainingData, targets, pFunction);

            Console.Out.WriteLine("Starting Point Accuracy");
            Console.Out.WriteLine(c11.GetObjectiveFunctionValue().ToString());

            CMAESCandidate globalBestCandidate = null;

            double sigma = 0.5;
            //double stopfitness = 1e-10;
            //double stopfitness = 0.99;
            double stopfitness = double.MaxValue;
            int    stopeval    = AppConstants.MaxIterationNumberCMAES;
            //double stopeval = AppConstants.MaxEvaluations
            //Strategy parameter setting: Selection
            int lambdaVal = (int)(4.0 + Math.Floor(3.0 * Math.Log(n)));  //population size; named lambdaVal because lambda is a reserved keyword in the original Python port
            //lambdaVal = AppConstants.PopulationSizeCMAES #change by KRM because I think we need more sampling
            double          mu      = lambdaVal / 2.0;
            Matrix <double> weights = Matrix <double> .Build.Dense((int)mu, 1);  //weights = numpy.matrix(weightsPrime, dtype = float).reshape(mu, 1)

            for (int i = 0; i < (int)mu; i++)
            {
                weights[i, 0] = Math.Log(mu + 0.5) - Math.Log(i + 1); //weights[i, 0] = math.log(mu + 0.5) - math.log((i + 1))#use i+1 instead of i in this case to match matlab indexing
            }
            mu = Math.Floor(mu);
            double sumWeights = 0.0;

            for (int i = 0; i < weights.RowCount; i++)
            {
                sumWeights = sumWeights + weights[i, 0];
            }
            //Divide by the sum
            for (int i = 0; i < weights.RowCount; i++)
            {
                weights[i, 0] = weights[i, 0] / sumWeights;
            }
            //Computation for the mueff variable
            double mueffNum = 0.0;
            double mueffDem = 0.0;

            for (int i = 0; i < weights.RowCount; i++)
            {
                mueffNum = weights[i, 0] + mueffNum;
                mueffDem = weights[i, 0] * weights[i, 0] + mueffDem;
            }
            mueffNum = mueffNum * mueffNum;
            double mueff = mueffNum / mueffDem;

            // Strategy parameter setting: Adaptation
            double cc    = (4.0 + mueff / n) / (n + 4.0 + 2.0 * mueff / n);                                     //#time constant for cumulation for C
            double cs    = (mueff + 2.0) / (n + mueff + 5.0);                                                   //#t-const for cumulation for sigma control
            double c1    = 2.0 / ((n + 1.3) * (n + 1.3) + mueff);                                               //#learning rate for rank-one update of C
            double cmu   = Math.Min(1.0 - c1, 2.0 * (mueff - 2.0 + 1.0 / mueff) / ((n + 2) * (n + 2) + mueff)); //# and for rank-mu update
            double damps = 1.0 + 2.0 * Math.Max(0, Math.Sqrt((mueff - 1) / (n + 1)) - 1) + cs;                  //# damping for sigma

            //Initialize dynamic (internal) strategy parameters and constants
            //evolution paths for C and sigma
            Matrix <double> pc = Matrix <double> .Build.Dense(n, 1);

            Matrix <double> ps = Matrix <double> .Build.Dense(n, 1);

            Matrix <double> D = Matrix <double> .Build.Dense(n, 1);

            for (int i = 0; i < n; i++)
            {
                pc[i, 0] = 0;
                ps[i, 0] = 0;
                D[i, 0]  = 1.0;
            }
            //Create B Matrix
            Matrix <double> B = Matrix <double> .Build.Dense(n, n);

            for (int i = 0; i < n; i++)
            {
                for (int j = 0; j < n; j++)
                {
                    if (i == j)
                    {
                        B[i, j] = 1.0;
                    }
                    else
                    {
                        B[i, j] = 0.0;
                    }
                }
            }
            //Create C Matrix
            Matrix <double> dSquare = Matrix <double> .Build.Dense(n, 1);

            for (int i = 0; i < n; i++)
            {
                dSquare[i, 0] = D[i, 0] * D[i, 0];
            }
            Matrix <double> C = B * Diagonalize1DMatrix(dSquare) * B.Transpose(); //C = B * self.diag(DSquare) * numpy.transpose(B)
            //Create invertsqrtC Matrix
            Matrix <double> oneOverD = Matrix <double> .Build.Dense(n, 1);

            for (int i = 0; i < n; i++)
            {
                oneOverD[i, 0] = 1.0 / D[i, 0];
            }
            Matrix <double> invsqrtC  = B * Diagonalize1DMatrix(oneOverD) * B.Transpose();
            double          eigeneval = 0; //track update of B and D
            double          chiN      = Math.Pow(n, 0.5) * (1.0 - 1.0 / (4.0 * n) + 1.0 / (21.0 * Math.Pow(n, 2.0)));
            int             counteval = 0;

            //the next 40 lines contain the 20 lines of interesting code
            //CMAESCandidate[] candidateArray = new CMAESCandidate[lambdaVal];
            List <CMAESCandidate> candidateArray = new List <CMAESCandidate>();

            for (int i = 0; i < lambdaVal; i++)
            {
                candidateArray.Add(new CMAESCandidate());
            }

            while (counteval < stopeval)
            {
                Matrix <double> arx = Matrix <double> .Build.Dense(n, lambdaVal);

                //fill in the initial solutions
                for (int i = 0; i < lambdaVal; i++)
                {
                    Matrix <double> randD = Matrix <double> .Build.Dense(n, 1);

                    for (int j = 0; j < n; j++)
                    {
                        randD[j, 0] = D[j, 0] * GenerateRandomNormalVariableForCMAES(randomGenerator, 0, 1.0);
                    }
                    Matrix <double> inputVector      = xMean + sigma * B * randD;
                    double[]        tempWeightVector = new double[inputVector.RowCount];
                    for (int k = 0; k < inputVector.RowCount; k++)
                    {
                        tempWeightVector[k] = inputVector[k, 0];
                    }
                    candidateArray[i] = new CMAESCandidate(tempWeightVector, trainingData, targets, pFunction);
                    counteval         = counteval + 1;
                }
                candidateArray.Sort(); //Sort() orders ascending via CMAESCandidate.CompareTo (see Example #1)
                candidateArray.Reverse();
                Matrix <double> xOld = xMean.Clone();
                //Get the new mean value
                Matrix <double> arxSubset = Matrix <double> .Build.Dense(n, (int)mu); //in Matlab this variable would be "arx(:,arindex(1:mu))"

                //This replaces line  arxSubset[:, i] = CandidateList[i].InputVector
                for (int i = 0; i < mu; i++)
                {
                    for (int j = 0; j < n; j++)
                    {
                        arxSubset[j, i] = candidateArray[i].GetWeightVector()[j];
                    }
                }
                xMean = arxSubset * weights; //Line 76 Matlab
                //Cumulation: Update evolution paths
                ps = (1 - cs) * ps + Math.Sqrt(cs * (2.0 - cs) * mueff) * invsqrtC * (xMean - xOld) / sigma;
                //Compute ps.^2 equivalent
                double psSquare = 0;
                for (int i = 0; i < ps.RowCount; i++)
                {
                    psSquare = psSquare + ps[i, 0] * ps[i, 0];
                }

                //Compute hsig
                double hSig         = 0.0;
                double term1ForHsig = psSquare / (1.0 - Math.Pow(1.0 - cs, 2.0 * counteval / lambdaVal)) / n;
                double term2ForHsig = 2.0 + 4.0 / (n + 1.0);
                if (term1ForHsig < term2ForHsig)
                {
                    hSig = 1.0;
                }
                //Compute pc, Line 82 Matlab
                pc = (1.0 - cc) * pc + hSig * Math.Sqrt(cc * (2.0 - cc) * mueff) * (xMean - xOld) / sigma;
                //Adapt covariance matrix C
                Matrix <double> repmatMatrix = Tile((int)mu, xOld); //NOT SURE IF THIS IS RIGHT IN C# FIX repmatMatrix = numpy.tile(xold, mu)
                Matrix <double> artmp        = (1.0 / sigma) * (arxSubset - repmatMatrix);
                // C = (1-c1-cmu) * C  + c1 * (pc * pc' + (1-hsig) * cc*(2-cc) * C) + cmu * artmp * diag(weights) * artmp' #This is the original Matlab line for reference
                C = (1.0 - c1 - cmu) * C + c1 * (pc * pc.Transpose() + (1 - hSig) * cc * (2.0 - cc) * C) + cmu * artmp * Diagonalize1DMatrix(weights) * artmp.Transpose();
                //Adapt step size sigma
                //sigma = sigma * Math.Exp((cs / damps) * (numpy.linalg.norm(ps) / chiN - 1))
                sigma = sigma * Math.Exp((cs / damps) * (ps.L2Norm() / chiN - 1)); //NOT SURE IF THIS IS RIGHT FIX IN C#
                //Update B and D from C
                if ((counteval - eigeneval) > (lambdaVal / (c1 + cmu) / n / 10.0))
                {
                    eigeneval = counteval;
                    //C = numpy.triu(C) + numpy.transpose(numpy.triu(C, 1)) #enforce symmetry
                    C = C.UpperTriangle() + C.StrictlyUpperTriangle().Transpose(); //NOT SURE IF THIS IS RIGHT FIX IN C#

                    //eigen decomposition
                    Evd <double> eigen = C.Evd();
                    B = eigen.EigenVectors;
                    Vector <System.Numerics.Complex> vectorEigenValues = eigen.EigenValues;
                    for (int i = 0; i < vectorEigenValues.Count; i++)
                    {
                        D[i, 0] = vectorEigenValues[i].Real;
                    }
                    //take sqrt of D
                    for (int i = 0; i < vectorEigenValues.Count; i++)
                    {
                        D[i, 0] = Math.Sqrt(D[i, 0]);
                    }

                    for (int i = 0; i < n; i++)
                    {
                        oneOverD[i, 0] = 1.0 / D[i, 0];
                    }
                    Matrix <double> middleTerm = Diagonalize1DMatrix(oneOverD); //#Built in Numpy function doesn't create the right size matrix in this case (ex: Numpy gives 1x1 but should be 5x5)
                    invsqrtC = B * middleTerm * B.Transpose();
                }

                globalBestCandidate = candidateArray[0];
                //Termination Conditions
                //bestCandidate = candidateArray[0]; //Array index 0 has the smallest objective function value
                //if (bestCandidate.GetObjectiveFunctionValue() < currentBestValue)
                //{
                //    currentBestValue = bestCandidate.GetObjectiveFunctionValue();
                //    globalBestCandidate = (CMAESCandidate)bestCandidate.Clone();
                //}
                //if (bestCandidate.GetObjectiveFunctionValue() >= stopfitness)
                //{
                //    return globalBestCandidate;
                //}

                //Add in extra termination conditions
                //if (bestCandidate.GetObjectiveFunctionValue() == currentBestValue)
                //{
                //    repeatCount = repeatCount + 1;

                //}
                //else
                //{
                //    repeatCount = 0;
                //}
                ////we have repeated too many times
                //if (repeatCount > 10)
                //{
                //    Console.Out.WriteLine("CMA-ES terminated to due to repeated best.");
                //    return globalBestCandidate;
                //}
                Console.Out.WriteLine("Iteration #" + counteval.ToString());
                //Console.Out.WriteLine("Best Value=" + (1.0 - currentBestValue).ToString());
                Console.Out.WriteLine("Current Value=" + (candidateArray[candidateArray.Count - 1].GetObjectiveFunctionValue()).ToString());
            }//end while loop
            return(globalBestCandidate); //just in case everything terminates
        }
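
For the parameters used throughout this file (bitNumber = 128, numXOR = 4), the strategy-parameter setup at the top of ComputeCMAESBecker works out to n = 517, lambda = 22, mu = 11. The sketch below just evaluates those expressions as a worked example; it is not project code:

using System;

class StrategyParamsSketch
{
    static void Main()
    {
        int bitNumber = 128;
        int numXOR = 4;

        int dimensionNumber = (bitNumber + 1) * numXOR;                  //516 weights in the XOR APUF model
        int n = dimensionNumber + 1;                                     //517: weights + epsilon, as in ComputeCMAESBecker
        int lambdaVal = (int)(4.0 + Math.Floor(3.0 * Math.Log(n)));      //22: population size per generation
        int mu = (int)Math.Floor(lambdaVal / 2.0);                       //11: parents kept for recombination

        Console.WriteLine($"n = {n}, lambda = {lambdaVal}, mu = {mu}");  //prints n = 517, lambda = 22, mu = 11
    }
}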
Example #7
        //Run the CMA-ES from a recovered file
        public static CMAESCandidate RecoveredCMAES(Random randomGenerator, int coreNumberForSaving, PUFObjectiveFunction pFunction, InvariantData invData, VariantData varData)
        {
            double stopFitness = 0.98;
            //non-specific variables (used by each core)
            int stopeval = invData.GetMaxEval();

            sbyte[][] trainingData = invData.GetTrainingData();
            sbyte[]   targets      = invData.GetTrainingResponseForPUF(coreNumberForSaving);

            //core specific variables (specific to each run of CMA-ES)
            int             counteval           = varData.GetCurrentEval();
            int             n                   = varData.GetDimensionNum();
            int             lambdaVal           = varData.GetLambda();
            Matrix <double> xMean               = varData.GetXMean();
            Matrix <double> D                   = varData.GetD();
            Matrix <double> B                   = varData.GetB();
            double          sigma               = varData.GetSigma();
            Matrix <double> weights             = varData.GetWeights();
            Matrix <double> ps                  = varData.GetPS();
            double          cs                  = varData.GetCS();
            double          mueff               = varData.GetMueff();
            double          cc                  = varData.GetCC();
            Matrix <double> pc                  = varData.GetPC();
            Matrix <double> invsqrtC            = varData.GetInvSqrtC();
            Matrix <double> C                   = varData.GetMatrixC();
            double          mu                  = varData.GetMu();
            double          c1                  = varData.GetC1();
            double          cmu                 = varData.GetCmu();
            double          damps               = varData.GetDamps();
            double          chiN                = varData.GetChiN();
            double          eigeneval           = varData.GetEigenVal();
            CMAESCandidate  globalBestCandidate = varData.GetGlobalBest();

            Matrix <double> oneOverD = Matrix <double> .Build.Dense(n, 1); //Don't need to copy this because we get it from D

            //just a sanity check
            if (varData.coreNumber != coreNumberForSaving)
            {
                throw new Exception("The saved core number and current core number don't match.");
            }

            //the next 40 lines contain the 20 lines of interesting code
            //CMAESCandidate[] candidateArray = new CMAESCandidate[lambdaVal];
            List <CMAESCandidate> candidateArray = new List <CMAESCandidate>();

            for (int i = 0; i < lambdaVal; i++)
            {
                candidateArray.Add(new CMAESCandidate());
            }

            while (counteval < stopeval)
            {
                Matrix <double> arx = Matrix <double> .Build.Dense(n, lambdaVal);

                //fill in the initial solutions
                for (int i = 0; i < lambdaVal; i++)
                {
                    Matrix <double> randD = Matrix <double> .Build.Dense(n, 1);

                    for (int j = 0; j < n; j++)
                    {
                        randD[j, 0] = D[j, 0] * GenerateRandomNormalVariableForCMAES(randomGenerator, 0, 1.0);
                    }
                    Matrix <double> inputVector      = xMean + sigma * B * randD;
                    double[]        tempWeightVector = new double[inputVector.RowCount];
                    for (int k = 0; k < inputVector.RowCount; k++)
                    {
                        tempWeightVector[k] = inputVector[k, 0];
                    }
                    candidateArray[i] = new CMAESCandidate(tempWeightVector, trainingData, targets, pFunction);
                    counteval         = counteval + 1;
                }
                candidateArray.Sort(); //Sort() orders ascending via CMAESCandidate.CompareTo (see Example #1)
                candidateArray.Reverse();
                Matrix <double> xOld = xMean.Clone();
                //Get the new mean value
                Matrix <double> arxSubset = Matrix <double> .Build.Dense(n, (int)mu); //in Matlab this variable would be "arx(:,arindex(1:mu))"

                //This replaces line  arxSubset[:, i] = CandidateList[i].InputVector
                for (int i = 0; i < mu; i++)
                {
                    for (int j = 0; j < n; j++)
                    {
                        arxSubset[j, i] = candidateArray[i].GetWeightVector()[j];
                    }
                }
                xMean = arxSubset * weights; //Line 76 Matlab
                //Cumulation: Update evolution paths
                ps = (1 - cs) * ps + Math.Sqrt(cs * (2.0 - cs) * mueff) * invsqrtC * (xMean - xOld) / sigma;
                //Compute ps.^2 equivalent
                double psSquare = 0;
                for (int i = 0; i < ps.RowCount; i++)
                {
                    psSquare = psSquare + ps[i, 0] * ps[i, 0];
                }

                //Compute hsig
                double hSig         = 0.0;
                double term1ForHsig = psSquare / (1.0 - Math.Pow(1.0 - cs, 2.0 * counteval / lambdaVal)) / n;
                double term2ForHsig = 2.0 + 4.0 / (n + 1.0);
                if (term1ForHsig < term2ForHsig)
                {
                    hSig = 1.0;
                }
                //Compute pc, Line 82 Matlab
                pc = (1.0 - cc) * pc + hSig * Math.Sqrt(cc * (2.0 - cc) * mueff) * (xMean - xOld) / sigma;
                //Adapt covariance matrix C
                Matrix <double> repmatMatrix = Tile((int)mu, xOld); //NOT SURE IF THIS IS RIGHT IN C# FIX repmatMatrix = numpy.tile(xold, mu)
                Matrix <double> artmp        = (1.0 / sigma) * (arxSubset - repmatMatrix);
                // C = (1-c1-cmu) * C  + c1 * (pc * pc' + (1-hsig) * cc*(2-cc) * C) + cmu * artmp * diag(weights) * artmp' #This is the original Matlab line for reference
                C = (1.0 - c1 - cmu) * C + c1 * (pc * pc.Transpose() + (1 - hSig) * cc * (2.0 - cc) * C) + cmu * artmp * Diagonalize1DMatrix(weights) * artmp.Transpose();
                //Adapt step size sigma
                //sigma = sigma * Math.Exp((cs / damps) * (numpy.linalg.norm(ps) / chiN - 1))
                sigma = sigma * Math.Exp((cs / damps) * (ps.L2Norm() / chiN - 1)); //NOT SURE IF THIS IS RIGHT FIX IN C#
                //Update B and D from C
                if ((counteval - eigeneval) > (lambdaVal / (c1 + cmu) / n / 10.0))
                {
                    eigeneval = counteval;
                    //C = numpy.triu(C) + numpy.transpose(numpy.triu(C, 1)) #enforce symmetry
                    C = C.UpperTriangle() + C.StrictlyUpperTriangle().Transpose(); //NOT SURE IF THIS IS RIGHT FIX IN C#

                    //eigen decomposition
                    Evd <double> eigen = C.Evd();
                    B = eigen.EigenVectors;
                    Vector <System.Numerics.Complex> vectorEigenValues = eigen.EigenValues;
                    for (int i = 0; i < vectorEigenValues.Count; i++)
                    {
                        D[i, 0] = vectorEigenValues[i].Real;
                    }
                    //take sqrt of D
                    for (int i = 0; i < vectorEigenValues.Count; i++)
                    {
                        D[i, 0] = Math.Sqrt(D[i, 0]);
                    }

                    for (int i = 0; i < n; i++)
                    {
                        oneOverD[i, 0] = 1.0 / D[i, 0];
                    }
                    Matrix <double> middleTerm = Diagonalize1DMatrix(oneOverD); //#Built in Numpy function doesn't create the right size matrix in this case (ex: Numpy gives 1x1 but should be 5x5)
                    invsqrtC = B * middleTerm * B.Transpose();
                }

                globalBestCandidate = candidateArray[0];
                //Stopping condition
                if (globalBestCandidate.GetObjectiveFunctionValue() > stopFitness)
                {
                    return(globalBestCandidate);
                }

                //Time to save the state
                VariantData     varDataUpdated = new VariantData(coreNumberForSaving, counteval, n, lambdaVal, xMean, D, B, sigma, weights, ps, cs, mueff, cc, invsqrtC, C, mu, pc, c1, cmu, damps, chiN, eigeneval, globalBestCandidate);
                string          fname          = AppConstants.SaveDir + counteval.ToString() + "VariantData" + coreNumberForSaving.ToString();
                FileInfo        fi             = new FileInfo(fname);
                Stream          str            = fi.Open(FileMode.OpenOrCreate, FileAccess.Write);
                BinaryFormatter bf             = new BinaryFormatter();
                bf.Serialize(str, varDataUpdated);
                str.Close();

                Console.Out.WriteLine("Iteration #" + counteval.ToString());
                Console.Out.WriteLine("Current Value=" + (candidateArray[candidateArray.Count - 1].GetObjectiveFunctionValue()).ToString());
            }//end while loop
            return(globalBestCandidate); //just in case everything terminates
        }
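
Resuming a run therefore amounts to locating the newest checkpoint for a given core and deserializing it before handing it to RecoveredCMAES. The sketch below assumes the <counteval>VariantData<coreNumber> file naming used above and a plain directory string in place of AppConstants.SaveDir; in the real project the deserialized object would be cast to VariantData:

using System;
using System.IO;
using System.Linq;
using System.Runtime.Serialization.Formatters.Binary;

class CheckpointLookupSketch
{
    //Returns the checkpoint with the highest evaluation count for one core, or null if none exist.
    //Assumes every matching file follows the <counteval>VariantData<coreNumber> pattern.
    static object LoadLatestCheckpoint(string saveDir, int coreNumber)
    {
        string suffix = "VariantData" + coreNumber.ToString();
        var latest = Directory.GetFiles(saveDir)
            .Select(f => Path.GetFileName(f))
            .Where(f => f.EndsWith(suffix))
            .Select(f => new { Name = f, Eval = int.Parse(f.Substring(0, f.Length - suffix.Length)) })
            .OrderByDescending(x => x.Eval)
            .FirstOrDefault();
        if (latest == null)
        {
            return null;
        }
        using (Stream str = File.OpenRead(Path.Combine(saveDir, latest.Name)))
        {
            return new BinaryFormatter().Deserialize(str); //cast to VariantData in the real project
        }
    }

    static void Main()
    {
        object recovered = LoadLatestCheckpoint(@"C:\Temp\CMAESSaves", 0); //directory is an assumption
        Console.WriteLine(recovered == null ? "no checkpoint found" : "checkpoint loaded");
    }
}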