Example #1
 /// <summary>
 ///     Construct the parallel task.
 /// </summary>
 /// <param name="genome">The genome.</param>
 /// <param name="theOwner">The owner.</param>
 public ParallelScoreTask(IGenome genome, ParallelScore theOwner)
 {
     owner         = theOwner;
     this.genome   = genome;
     scoreFunction = theOwner.ScoreFunction;
     adjusters     = theOwner.Adjusters;
 }
Example #2
        /// <summary>
        /// The constructor.
        /// </summary>
        /// <param name="theAlgorithm">The algorithm to fit.</param>
        /// <param name="theScore">The score function.</param>
        /// <param name="thePopulationSize">The population size.</param>
        public ContinuousACO(IMLMethod theAlgorithm, IScoreFunction theScore, int thePopulationSize)
        {
            Epsilon = .75;

            _algorithm      = theAlgorithm;
            _populationSize = thePopulationSize;
            _score          = theScore;
            Random          = new MersenneTwisterGenerateRandom();
            _paramCount     = theAlgorithm.LongTermMemory.Length;

            _population = new ContinuousAnt[thePopulationSize * 2];
            _weighting  = new double[thePopulationSize];
            for (int i = 0; i < _population.Length; i++)
            {
                _population[i] = new ContinuousAnt(_paramCount, _score.ShouldMinimize);
                for (int j = 0; j < _paramCount; j++)
                {
                    _population[i].Params[j] = Random.NextDouble(-1, 1);
                }
            }

            UpdateScore();
            Array.Sort(_population);
            ComputeWeighting();
            SampleSolutions();
            Array.Sort(_population);
        }
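All of these constructors depend on the score object only through the members visible in the snippets: ShouldMinimize (used above when creating each ContinuousAnt) and CalculateScore (used by the trainers further down). The following is a minimal sketch of such a score function; it is not part of any of these libraries, and the real IScoreFunction interfaces may declare additional members.

// A minimal sketch, not taken from the library: score a candidate solution by
// summing the squares of the parameters held in its long-term memory.
// Assumption: IScoreFunction exposes only ShouldMinimize and CalculateScore,
// and the trainee is an IMLMethod (some examples use IMachineLearningAlgorithm).
public class SumOfSquaresScore : IScoreFunction
{
    // Lower is better, so trainers should minimize this score.
    public bool ShouldMinimize
    {
        get { return true; }
    }

    public double CalculateScore(IMLMethod algorithm)
    {
        double sum = 0.0;
        foreach (double x in algorithm.LongTermMemory)
        {
            sum += x * x;
        }
        return sum;
    }
}

With such an object, the constructor above could be invoked as, for example, new ContinuousACO(network, new SumOfSquaresScore(), 30), where network is any IMLMethod whose LongTermMemory holds the parameters being fitted.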
Example #3
        /// <summary>
        /// Construct PSO trainer.
        /// </summary>
        /// <param name="theParticles">The particles to use.</param>
        /// <param name="theCalculateScore">The score object.</param>
        public TrainPSO(IMLMethod[] theParticles,
                        IScoreFunction theCalculateScore)
        {
            _particles = theParticles;
            _score     = theCalculateScore;
            int vectorSize    = theParticles[0].LongTermMemory.Length;
            int particleCount = theParticles.Length;

            _bestVectors = new double[particleCount][];
            _velocities  = new double[particleCount][];
            _bestScores  = new double[particleCount];

            for (int i = 0; i < particleCount; i++)
            {
                _bestVectors[i] = new double[vectorSize];
                _velocities[i]  = new double[vectorSize];
            }

            _bestVectorIndex = -1;

            _bestVector = new double[vectorSize];

            foreach (double[] velocity in _velocities)
            {
                VectorAlgebra.Randomise(_rnd, velocity, this.maxVelocity);
            }
        }
Example #4
        public TrainNelderMead(IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore, double stepValue)
        {
            _algorithm = theAlgorithm;
            _score     = theScore;

            _start          = (double[])_algorithm.LongTermMemory.Clone();
            _trainedWeights = (double[])_algorithm.LongTermMemory.Clone();

            int n = _start.Length;

            _p      = new double[n * (n + 1)];
            _pstar  = new double[n];
            _p2Star = new double[n];
            _pbar   = new double[n];
            _y      = new double[n + 1];

            _nn  = n + 1;
            _del = 1.0;
            _rq  = 0.000001 * n;

            _step   = new double[_start.Length];
            _jcount = _konvge = 500;
            for (int i = 0; i < _step.Length; i++)
            {
                _step[i] = stepValue;
            }
        }
        public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
        {
            var jo     = JObject.Load(reader);
            var filter = jo.Property("filter")?.Value.ToObject <QueryContainer>(serializer);
            var weight = jo.Property("weight")?.Value.ToObject <double?>();

            IScoreFunction function = null;

            foreach (var prop in jo.Properties())
            {
                switch (prop.Name)
                {
                case "exp":
                case "gauss":
                case "linear":
                    var properties = prop.Value.Value <JObject>().Properties().ToList();
                    var fieldProp  = properties.First(p => p.Name != "multi_value_mode");
                    var field      = fieldProp.Name;
                    var f          = ReadDecayFunction(prop.Name, fieldProp.Value.Value <JObject>(), serializer);
                    f.Field = field;
                    var mv = properties.FirstOrDefault(p => p.Name == "multi_value_mode")?.Value;
                    if (mv != null)
                    {
                        f.MultiValueMode = serializer.Deserialize <MultiValueMode>(mv.CreateReader());
                    }
                    function = f;

                    break;

                case "random_score":
                    function = FromJson.ReadAs <RandomScoreFunction>(prop.Value.Value <JObject>().CreateReader(), serializer);
                    break;

                case "field_value_factor":
                    function = FromJson.ReadAs <FieldValueFactorFunction>(prop.Value.Value <JObject>().CreateReader(), serializer);
                    break;

                case "script_score":
                    function = FromJson.ReadAs <ScriptScoreFunction>(prop.Value.Value <JObject>().CreateReader(), serializer);
                    break;
                }
            }
            if (function == null && weight.HasValue)
            {
                function = new WeightFunction {
                    Weight = weight
                };
            }
            else if (function == null)
            {
                return(null);                                   //throw new Exception("error deserializing function score function");
            }
            function.Weight = weight;
            function.Filter = filter;
            return(function);
        }
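For orientation, the JSON shapes this converter accepts look roughly like the two sketches below. The outer property name selects the function type, with optional filter and weight siblings; the property names inside the decay object (origin, scale) follow the usual Elasticsearch function_score conventions and are an assumption here, since ReadDecayFunction is not shown.

        // Hypothetical inputs for ReadJson above (illustration only).
        var weightOnly = JObject.Parse(
            @"{ ""weight"": 2.0, ""filter"": { ""term"": { ""type"": ""text"" } } }");

        var gaussDecay = JObject.Parse(
            @"{ ""gauss"": { ""date"": { ""origin"": ""2013-09-17"", ""scale"": ""10d"" },
                             ""multi_value_mode"": ""avg"" } }");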
Example #6
        /// <summary>
        ///     Construct a greedy random algorithm.
        /// </summary>
        /// <param name="theShouldMinimize">True, if we should minimize.</param>
        /// <param name="theAlgorithm">The algorithm to optimize.</param>
        /// <param name="theScore">The score function.</param>
        public TrainGreedyRandom(bool theShouldMinimize, IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore)
        {
            _algorithm      = theAlgorithm;
            _score          = theScore;
            _shouldMinimize = theShouldMinimize;

            // Set the last error to a really bad value so it will be reset on the first iteration.
            _lastError = _shouldMinimize ? double.PositiveInfinity : Double.NegativeInfinity;
        }
Example #7
        /// <summary>
        ///     Construct a greedy random algorithm.
        /// </summary>
        /// <param name="theShouldMinimize">True, if we should minimize.</param>
        /// <param name="theAlgorithm">The algorithm to optimize.</param>
        /// <param name="theScore">The score function.</param>
        public TrainGreedyRandom(bool theShouldMinimize, IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore)
        {
            _algorithm = theAlgorithm;
            _score = theScore;
            _shouldMinimize = theShouldMinimize;

            // Set the last error to a really bad value so it will be reset on the first iteration.
            _lastError = _shouldMinimize ? double.PositiveInfinity : Double.NegativeInfinity;
        }
        public virtual void InitializeQValue(List <IIdentifiedSpectrum> spectra)
        {
            IScoreFunction scoreFunctions = Options.ScoreFunction;

            CalculateQValueFunc qValueFunc = Options.Parent.FalseDiscoveryRate.GetQValueFunction();

            IFalseDiscoveryRateCalculator fdrCalc = Options.Parent.FalseDiscoveryRate.GetFalseDiscoveryRateCalculator();

            qValueFunc(spectra, scoreFunctions, fdrCalc);
        }
Example #9
 /// <summary>
 ///     Construct the parallel score calculation object.
 /// </summary>
 /// <param name="thePopulation">The population to score.</param>
 /// <param name="theCODEC">The CODEC to use.</param>
 /// <param name="theAdjusters">The score adjusters to use.</param>
 /// <param name="theScoreFunction">The score function.</param>
 /// <param name="theThreadCount">The requested thread count.</param>
 public ParallelScore(IPopulation thePopulation, IGeneticCODEC theCODEC,
                      IList <IAdjustScore> theAdjusters, IScoreFunction theScoreFunction,
                      int theThreadCount)
 {
     _codec         = theCODEC;
     _population    = thePopulation;
     _scoreFunction = theScoreFunction;
     _adjusters     = theAdjusters;
     ThreadCount    = theThreadCount;
 }
Example #10
 /// <summary>
 ///     Construct the simulated annealing trainer.
 /// </summary>
 /// <param name="theAlgorithm">The algorithm to optimize.</param>
 /// <param name="theScore">The score function.</param>
 /// <param name="theKMax">The max number of iterations.</param>
 /// <param name="theStartingTemperature">The starting temperature.</param>
 /// <param name="theEndingTemperature">The ending temperature.</param>
 public TrainAnneal(IMLMethod theAlgorithm, IScoreFunction theScore, int theKMax,
                    double theStartingTemperature, double theEndingTemperature)
 {
     _algorithm           = theAlgorithm;
     _score               = theScore;
     _kMax                = theKMax;
     _currentError        = _score.CalculateScore(_algorithm);
     _startingTemperature = theStartingTemperature;
     _endingTemperature   = theEndingTemperature;
     _globalBest          = new double[theAlgorithm.LongTermMemory.Length];
     Array.Copy(_algorithm.LongTermMemory, 0, _globalBest, 0, _globalBest.Length);
 }
Example #11
        public void Function_score_query_must_transform_correclty_to_ES()
        {
            var functions = new IScoreFunction[]
            {
                new FilterScoreFunction(new MatchQuery("type", "text"), 2),
                new DecayScoreFunction(DecayFunction.Gauss, "date", "2013-09-17", "10d", "5d", 0.5)
            };

            var query = new FunctionScoreQuery(new MatchQuery("headline", "Yuri Metelkin", true), functions);

            Assert.IsTrue(query.Query.Type == QueryType.MatchQuery);
            Assert.IsTrue(((MatchQuery)query.Query).Field == "headline");
            Assert.IsTrue(((MatchQuery)query.Query).Value.ToString() == "Yuri Metelkin");
            Assert.IsTrue(((MatchQuery)query.Query).IsAnd);

            var f = query.Functions[0];

            Assert.IsTrue(f.Type == QueryType.FilterScoreFunction);
            var fsf = f as FilterScoreFunction;

            Assert.IsTrue(fsf.Weight == 2);
            f = query.Functions[1];
            Assert.IsTrue(f.Type == QueryType.DecayScoreFunction);
            var dsf = f as DecayScoreFunction;

            Assert.IsTrue(dsf.Function == DecayFunction.Gauss);

            var json = query.ToString();
            var jo   = JsonObject.Parse(json);
            var q    = jo.ToQuery();

            Assert.IsTrue(q.Type == QueryType.FunctionScoreQuery);
            var fs    = q as FunctionScoreQuery;
            var match = fs.Query as MatchQuery;

            Assert.IsTrue(match.Field == "headline");
            Assert.IsTrue(match.Value.ToString() == "Yuri Metelkin");
            Assert.IsTrue(match.IsAnd);

            f = fs.Functions[0];
            Assert.IsTrue(f.Type == QueryType.FilterScoreFunction);
            fsf = f as FilterScoreFunction;
            Assert.IsTrue(fsf.Weight == 2);
            f = fs.Functions[1];
            Assert.IsTrue(f.Type == QueryType.DecayScoreFunction);
            dsf = f as DecayScoreFunction;
            Assert.IsTrue(dsf.Function == DecayFunction.Gauss);
        }
Example #12
        /// <summary>
        ///     Construct an EA.
        /// </summary>
        /// <param name="thePopulation">The population.</param>
        /// <param name="theScoreFunction">The score function.</param>
        public BasicEA(IPopulation thePopulation,
                       IScoreFunction theScoreFunction)
        {
            RandomNumberFactory = new MersenneTwisterFactory();
            EliteRate           = 0.3;
            MaxTries            = 5;
            MaxOperationErrors  = 500;
            CODEC = new GenomeAsPhenomeCODEC();


            Population    = thePopulation;
            ScoreFunction = theScoreFunction;
            Selection     = new TournamentSelection(this, 4);

            // set the score compare method
            if (theScoreFunction.ShouldMinimize)
            {
                SelectionComparer = new MinimizeAdjustedScoreComp();
                BestComparer      = new MinimizeScoreComp();
            }
            else
            {
                SelectionComparer = new MaximizeAdjustedScoreComp();
                BestComparer      = new MaximizeScoreComp();
            }

            // set the iteration
            foreach (ISpecies species in thePopulation.Species)
            {
                foreach (IGenome genome in species.Members)
                {
                    IterationNumber = Math.Max(IterationNumber,
                                               genome.BirthGeneration);
                }
            }


            // Set a best genome, just so it is not null.
            // We won't know the true best genome until the first iteration.
            if (Population.Species.Count > 0 && Population.Species[0].Members.Count > 0)
            {
                BestGenome = Population.Species[0].Members[0];
            }
        }
Example #13
        /// <summary>
        ///     Construct a hill climbing algorithm.
        /// </summary>
        /// <param name="theShouldMinimize">True, if we should minimize.</param>
        /// <param name="theAlgorithm">The algorithm to optimize.</param>
        /// <param name="theScore">The scoring function.</param>
        /// <param name="acceleration">The acceleration for step sizes.</param>
        /// <param name="stepSize">The initial step sizes.</param>
        public TrainHillClimb(bool theShouldMinimize, IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore,
            double acceleration, double stepSize)
        {
            _algorithm = theAlgorithm;
            _score = theScore;
            _shouldMinimize = theShouldMinimize;

            _stepSize = new double[theAlgorithm.LongTermMemory.Length];
            for (int i = 0; i < theAlgorithm.LongTermMemory.Length; i++)
            {
                _stepSize[i] = stepSize;
            }

            _candidate[0] = -acceleration;
            _candidate[1] = -1/acceleration;
            _candidate[2] = 0;
            _candidate[3] = 1/acceleration;
            _candidate[4] = acceleration;

            // Set the last error to a really bad value so it will be reset on the first iteration.
            _lastError = _shouldMinimize ? double.PositiveInfinity : double.NegativeInfinity;
        }
Example #14
        /// <summary>
        ///     Construct a hill climbing algorithm.
        /// </summary>
        /// <param name="theShouldMinimize">True, if we should minimize.</param>
        /// <param name="theAlgorithm">The algorithm to optimize.</param>
        /// <param name="theScore">The scoring function.</param>
        /// <param name="acceleration">The acceleration for step sizes.</param>
        /// <param name="stepSize">The initial step sizes.</param>
        public TrainHillClimb(bool theShouldMinimize, IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore,
                              double acceleration, double stepSize)
        {
            _algorithm      = theAlgorithm;
            _score          = theScore;
            _shouldMinimize = theShouldMinimize;

            _stepSize = new double[theAlgorithm.LongTermMemory.Length];
            for (int i = 0; i < theAlgorithm.LongTermMemory.Length; i++)
            {
                _stepSize[i] = stepSize;
            }

            _candidate[0] = -acceleration;
            _candidate[1] = -1 / acceleration;
            _candidate[2] = 0;
            _candidate[3] = 1 / acceleration;
            _candidate[4] = acceleration;

            // Set the last error to a really bad value so it will be reset on the first iteration.
            _lastError = _shouldMinimize ? double.PositiveInfinity : double.NegativeInfinity;
        }
Example #15
 /// <summary>
 ///     Construct the parallel score calculation object.
 /// </summary>
 /// <param name="thePopulation">The population to score.</param>
 /// <param name="theCODEC">The CODEC to use.</param>
 /// <param name="theAdjusters">The score adjusters to use.</param>
 /// <param name="theScoreFunction">The score function.</param>
 /// <param name="theThreadCount">The requested thread count.</param>
 public ParallelScore(IPopulation thePopulation, IGeneticCODEC theCODEC,
                      IList<IAdjustScore> theAdjusters, IScoreFunction theScoreFunction,
                      int theThreadCount)
 {
     _codec = theCODEC;
     _population = thePopulation;
     _scoreFunction = theScoreFunction;
     _adjusters = theAdjusters;
     ThreadCount = theThreadCount;
 }
Example #16
 /// <summary>
 ///     Construct the parallel task.
 /// </summary>
 /// <param name="genome">The genome.</param>
 /// <param name="theOwner">The owner.</param>
 public ParallelScoreTask(IGenome genome, ParallelScore theOwner)
 {
     owner = theOwner;
     this.genome = genome;
     scoreFunction = theOwner.ScoreFunction;
     adjusters = theOwner.Adjusters;
 }
Example #17
 /// <summary>
 ///     Construct a hill climbing algorithm. Use acceleration of 1.2 and initial step size of 1.
 /// </summary>
 /// <param name="theShouldMinimize">True, if we should minimize.</param>
 /// <param name="theAlgorithm">The algorithm to optimize.</param>
 /// <param name="theScore">The scoring function.</param>
 public TrainHillClimb(bool theShouldMinimize, IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore)
     : this(theShouldMinimize, theAlgorithm, theScore, 1.2, 1)
 {
 }
Example #18
 public OptimalResultCalculator(IScoreFunction scoreFunctions)
 {
     this.ScoreFunc = scoreFunctions;
 }
Example #19
 /// <summary>
 ///     Construct a hill climbing algorithm. Use acceleration of 1.2 and initial step size of 1.
 /// </summary>
 /// <param name="theShouldMinimize">True, if we should minimize.</param>
 /// <param name="theAlgorithm">The algorithm to optimize.</param>
 /// <param name="theScore">The scoring function.</param>
 public TrainHillClimb(bool theShouldMinimize, IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore)
     : this(theShouldMinimize, theAlgorithm, theScore, 1.2, 1)
 {
 }
        /// <summary>
        /// Calculate the QValue for a list of identified spectra using the given score/sorting function and FDR calculator.
        /// </summary>
        /// <param name="peptides">The list of identified spectra.</param>
        /// <param name="scoreFuncs">The object responsible for score extraction and spectrum sorting.</param>
        /// <param name="fdrCalc">The false discovery rate calculator.</param>
        public static void CalculateQValue(List <IIdentifiedSpectrum> peptides, IScoreFunction scoreFuncs, IFalseDiscoveryRateCalculator fdrCalc)
        {
            if (peptides.Count == 0)
            {
                return;
            }

            scoreFuncs.SortSpectrum(peptides);

            int totalTarget = 0;
            int totalDecoy  = 0;

            HashSet <string> filenames = new HashSet <string>();

            foreach (IIdentifiedSpectrum spectrum in peptides)
            {
                spectrum.QValue = 0.0;
                if (filenames.Contains(spectrum.Query.FileScan.LongFileName))
                {
                    continue;
                }
                filenames.Add(spectrum.Query.FileScan.LongFileName);

                if (spectrum.FromDecoy)
                {
                    totalDecoy++;
                }
                else
                {
                    totalTarget++;
                }
            }

            double lastScore  = scoreFuncs.GetScore(peptides[peptides.Count - 1]);
            double lastQvalue = fdrCalc.Calculate(totalDecoy, totalTarget);

            for (int i = peptides.Count - 1; i >= 0; i--)
            {
                double score = scoreFuncs.GetScore(peptides[i]);
                if (score != lastScore)
                {
                    lastScore  = score;
                    lastQvalue = fdrCalc.Calculate(totalDecoy, totalTarget);
                    if (lastQvalue == 0.0)
                    {
                        break;
                    }
                    peptides[i].QValue = lastQvalue;
                }
                else
                {
                    peptides[i].QValue = lastQvalue;
                }

                if (peptides[i].FromDecoy)
                {
                    totalDecoy--;
                }
                else
                {
                    totalTarget--;
                }
            }
        }
        public static void CalculateUniqueQValue(List <IIdentifiedSpectrum> peptides, IScoreFunction scoreFuncs, IFalseDiscoveryRateCalculator fdrCalc)
        {
            if (peptides.Count == 0)
            {
                return;
            }

            scoreFuncs.SortSpectrum(peptides);

            List <IIdentifiedSpectrum> sameScores = new List <IIdentifiedSpectrum>();
            HashSet <string>           targetSeq  = new HashSet <string>();
            HashSet <string>           decoySeq   = new HashSet <string>();

            double lastScore = scoreFuncs.GetScore(peptides[0]);

            for (int i = 0; i < peptides.Count; i++)
            {
                IIdentifiedSpectrum spectrum = peptides[i];
                double score = scoreFuncs.GetScore(peptides[i]);
                if (score == lastScore)
                {
                    sameScores.Add(spectrum);
                    if (spectrum.FromDecoy)
                    {
                        decoySeq.Add(spectrum.Peptide.PureSequence);
                    }
                    else
                    {
                        targetSeq.Add(spectrum.Peptide.PureSequence);
                    }
                    continue;
                }
                else
                {
                    double qValue = fdrCalc.Calculate(decoySeq.Count, targetSeq.Count);
                    foreach (IIdentifiedSpectrum sameScoreSpectrum in sameScores)
                    {
                        sameScoreSpectrum.QValue = qValue;
                    }

                    sameScores.Clear();

                    lastScore = score;
                    sameScores.Add(spectrum);
                    if (spectrum.FromDecoy)
                    {
                        decoySeq.Add(spectrum.Peptide.PureSequence);
                    }
                    else
                    {
                        targetSeq.Add(spectrum.Peptide.PureSequence);
                    }
                    continue;
                }
            }
            double lastQValue = fdrCalc.Calculate(decoySeq.Count, targetSeq.Count);

            foreach (IIdentifiedSpectrum sameScoreSpectrum in sameScores)
            {
                sameScoreSpectrum.QValue = lastQValue;
            }
        }
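Both q-value methods above touch the FDR calculator only through Calculate(decoyCount, targetCount). Below is a minimal sketch of such a calculator, assuming the conventional target-decoy estimate decoy/target and assuming the interface declares only that one method; the calculators actually returned by GetFalseDiscoveryRateCalculator() may use a different formula.

        // A minimal sketch, not from the library: conventional target-decoy FDR estimate.
        public class SimpleTargetDecoyCalculator : IFalseDiscoveryRateCalculator
        {
            public double Calculate(int decoyCount, int targetCount)
            {
                if (targetCount == 0)
                {
                    return decoyCount == 0 ? 0.0 : 1.0;
                }
                return decoyCount / (double)targetCount;
            }
        }

Such an object could then be passed as the fdrCalc argument of CalculateQValue or CalculateUniqueQValue.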
Example #22
 /// <summary>
 ///     Construct the simulated annealing trainer.  Use 1000 iterations and temperature from 400 to 0.0001.
 /// </summary>
 /// <param name="theAlgorithm">The algorithm to optimize.</param>
 /// <param name="theScore">The score function.</param>
 public TrainAnneal(IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore)
     : this(theAlgorithm, theScore, 1000, 400, 0.0001)
 {
 }
Example #23
 /// <summary>
 ///     Construct the simulated annealing trainer.  Use 1000 iterations and temperature from 400 to 0.0001.
 /// </summary>
 /// <param name="theAlgorithm">The algorithm to optimize.</param>
 /// <param name="theScore">The score function.</param>
 public TrainAnneal(IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore)
     : this(theAlgorithm, theScore, 1000, 400, 0.0001)
 {
 }
Example #24
 /// <summary>
 ///     Construct the simulated annealing trainer.  Use 1000 iterations and temperature from 400 to 0.0001.
 /// </summary>
 /// <param name="theAlgorithm">The algorithm to optimize.</param>
 /// <param name="theScore">The score function.</param>
 public TrainAnneal(IMLMethod theAlgorithm, IScoreFunction theScore)
     : this(theAlgorithm, theScore, 1000, 400, 0.0001)
 {
 }
 /// <summary>
 /// Compare multiple identification results obtained from the same spectrum (assumed to have the same charge)
 /// under different search conditions (for example, SILAC heavy versus light labels).
 /// Keep every result with the highest score (there may be more than one).
 /// This function removes redundancy before peptide filtering.
 /// </summary>
 /// <param name="peptides">The list of identified spectra.</param>
 /// <param name="score">The score function used for sorting and comparison.</param>
 public static void KeepUnconflictPeptidesFromSameEngineDifferentParameters(List <IIdentifiedSpectrum> peptides, IScoreFunction score)
 {
     DoFilterPeptideFromSameEngineDifferentParameters(peptides, score, KepTopScore);
 }
Example #26
 public TrainNelderMead(IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore)
     : this(theAlgorithm, theScore, 100)
 {
 }
        public static void DoFilterPeptideFromSameEngineDifferentParameters(List <IIdentifiedSpectrum> peptides, IScoreFunction score, Action <List <IIdentifiedSpectrum> > filter)
        {
            var  dic     = peptides.ToGroupDictionary(m => m.Query.FileScan.Experimental);
            bool changed = false;

            foreach (var v in dic.Values)
            {
                var  vdic           = v.ToGroupDictionary(m => m.Query.FileScan.FirstScan);
                bool currentChanged = false;
                foreach (var peps in vdic.Values)
                {
                    if (peps.Count > 1)
                    {
                        score.SortSpectrum(peps);
                        filter(peps);
                        currentChanged = true;
                    }
                }

                if (currentChanged)
                {
                    changed = true;
                    v.Clear();
                    foreach (var peps in vdic.Values)
                    {
                        v.AddRange(peps);
                    }
                }
            }

            if (changed)
            {
                peptides.Clear();
                foreach (var v in dic.Values)
                {
                    peptides.AddRange(v);
                }
            }
        }
Example #28
        /// <summary>
        /// Construct PSO trainer.
        /// </summary>
        /// <param name="theParticles">The particles to use.</param>
        /// <param name="theCalculateScore">The score object.</param>
        public TrainPSO(IMLMethod[] theParticles,
                        IScoreFunction theCalculateScore)
        {
            _particles = theParticles;
            _score = theCalculateScore;
            int vectorSize = theParticles[0].LongTermMemory.Length;
            int particleCount = theParticles.Length;

            _bestVectors = new double[particleCount][];
            _velocities = new double[particleCount][];
            _bestScores = new double[particleCount];

            for (int i = 0; i < particleCount; i++)
            {
                _bestVectors[i] = new double[vectorSize];
                _velocities[i] = new double[vectorSize];
            }

            _bestVectorIndex = -1;

            _bestVector = new double[vectorSize];

            foreach (double[] velocity in _velocities)
            {
                VectorAlgebra.Randomise(_rnd, velocity, this.maxVelocity);
            }
        }
Example #29
        public void CalculateToleranceScore(IScoreFunction scoreFunc)
        {
            scoreFunc.SortSpectrum(this.Spectra);

            this.Result.Score = this.Spectra.Count > 0 ? scoreFunc.GetScore(this.Spectra.Last()) : 0.0;
        }
Example #30
 public TrainNelderMead(IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore)
     : this(theAlgorithm, theScore, 100)
 {
 }
Example #31
 /// <summary>
 ///     Construct the simulated annealing trainer.
 /// </summary>
 /// <param name="theAlgorithm">The algorithm to optimize.</param>
 /// <param name="theScore">The score function.</param>
 /// <param name="theKMax">The max number of iterations.</param>
 /// <param name="theStartingTemperature">The starting temperature.</param>
 /// <param name="theEndingTemperature">The ending temperature.</param>
 public TrainAnneal(IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore, int theKMax,
     double theStartingTemperature, double theEndingTemperature)
 {
     _algorithm = theAlgorithm;
     _score = theScore;
     _kMax = theKMax;
     _currentError = _score.CalculateScore(_algorithm);
     _startingTemperature = theStartingTemperature;
     _endingTemperature = theEndingTemperature;
     _globalBest = new double[theAlgorithm.LongTermMemory.Length];
     Array.Copy(_algorithm.LongTermMemory, 0, _globalBest, 0, _globalBest.Length);
 }
Example #32
        public TrainNelderMead(IMachineLearningAlgorithm theAlgorithm, IScoreFunction theScore, double stepValue)
        {
            _algorithm = theAlgorithm;
            _score = theScore;

            _start = (double[]) _algorithm.LongTermMemory.Clone();
            _trainedWeights = (double[]) _algorithm.LongTermMemory.Clone();

            int n = _start.Length;

            _p = new double[n*(n + 1)];
            _pstar = new double[n];
            _p2Star = new double[n];
            _pbar = new double[n];
            _y = new double[n + 1];

            _nn = n + 1;
            _del = 1.0;
            _rq = 0.000001*n;

            _step = new double[_start.Length];
            _jcount = _konvge = 500;
            for (int i = 0; i < _step.Length; i++)
            {
                _step[i] = stepValue;
            }
        }
Example #33
 /// <summary>
 ///     Construct the simulated annealing trainer.  Use 1000 iterations and temperature from 400 to 0.0001.
 /// </summary>
 /// <param name="theAlgorithm">The algorithm to optimize.</param>
 /// <param name="theScore">The score function.</param>
 public TrainAnneal(IMLMethod theAlgorithm, IScoreFunction theScore)
     : this(theAlgorithm, theScore, 1000, 400, 0.0001)
 {
 }
Example #34
        /// <summary>
        ///     Construct an EA.
        /// </summary>
        /// <param name="thePopulation">The population.</param>
        /// <param name="theScoreFunction">The score function.</param>
        public BasicEA(IPopulation thePopulation,
            IScoreFunction theScoreFunction)
        {
            RandomNumberFactory = new MersenneTwisterFactory();
            EliteRate = 0.3;
            MaxTries = 5;
            MaxOperationErrors = 500;
            CODEC = new GenomeAsPhenomeCODEC();


            Population = thePopulation;
            ScoreFunction = theScoreFunction;
            Selection = new TournamentSelection(this, 4);

            // set the score compare method
            if (theScoreFunction.ShouldMinimize)
            {
                SelectionComparer = new MinimizeAdjustedScoreComp();
                BestComparer = new MinimizeScoreComp();
            }
            else
            {
                SelectionComparer = new MaximizeAdjustedScoreComp();
                BestComparer = new MaximizeScoreComp();
            }

            // set the iteration
            foreach (ISpecies species in thePopulation.Species)
            {
                foreach (IGenome genome in species.Members)
                {
                    IterationNumber = Math.Max(IterationNumber,
                        genome.BirthGeneration);
                }
            }


            // Set a best genome, just so it is not null.
            // We won't know the true best genome until the first iteration.
            if (Population.Species.Count > 0 && Population.Species[0].Members.Count > 0)
            {
                BestGenome = Population.Species[0].Members[0];
            }
        }
Example #35
        /// <summary>
        /// The constructor. 
        /// </summary>
        /// <param name="theAlgorithm">The algorithm to fit.</param>
        /// <param name="theScore">The score function.</param>
        /// <param name="thePopulationSize">The population size.</param>
        public ContinuousACO(IMLMethod theAlgorithm, IScoreFunction theScore, int thePopulationSize)
        {
            Epsilon = .75;

            _algorithm = theAlgorithm;
            _populationSize = thePopulationSize;
            _score = theScore;
            Random = new MersenneTwisterGenerateRandom();
            _paramCount = theAlgorithm.LongTermMemory.Length;

            _population = new ContinuousAnt[thePopulationSize * 2];
            _weighting = new double[thePopulationSize];
            for (int i = 0; i < _population.Length; i++)
            {
                _population[i] = new ContinuousAnt(_paramCount, _score.ShouldMinimize);
                for (int j = 0; j < _paramCount; j++)
                {
                    _population[i].Params[j] = Random.NextDouble(-1, 1);
                }
            }

            UpdateScore();
            Array.Sort(_population);
            ComputeWeighting();
            SampleSolutions();
            Array.Sort(_population);

        }
 public QValueCalculator(IScoreFunction scoreFunc, IFalseDiscoveryRateCalculator fdrCalc)
 {
     this.scoreFunc = scoreFunc;
     this.fdrCalc   = fdrCalc;
 }