Example #1
        public static Predictor GetInstance(string modelName, int eLength, int ncLength, Converter <Hla, Hla> hlaForNormalization)
        {
            Predictor aBestPredictor = new Predictor();

            aBestPredictor.LoadUpPredictor(modelName, eLength, ncLength, hlaForNormalization);
            return(aBestPredictor);
        }
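A minimal usage sketch of this factory (not from the original source): the 9-mer epitope length and the identity normalization delegate are assumptions, while the model name comes from the "lanliedb03062007" case handled in Example #2 below.

        Converter<Hla, Hla> identity = delegate(Hla hla) { return hla; };   // hypothetical normalization: leave the Hla unchanged
        Predictor nineMerPredictor = Predictor.GetInstance("lanlIedb03062007", /*eLength*/ 9, /*ncLength*/ 0, identity);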
Example #2
        private void LoadUpPredictor(string modelName, int eLength, int ncLength, Converter <Hla, Hla> hlaForNormalization)
        {
            //Load up the predictor

            string featurerizerName;

            switch (modelName.ToLower())
            {
            //!!!would be better not to have multiple of these switch statements around - looks like a job for a Class
            case "lanliedb03062007":
                featurerizerName   = "[email protected]";
                SampleNEC          = NEC.GetInstance("", new string(' ', eLength), "");
                HlaFactory         = HlaFactory.GetFactory("MixedWithB15AndA68");
                SourceDataFileName = "lanlIedb03062007.pos.source.txt";
                NameList           = new string[] { "LANL", "IEDB" };
                break;

            default:
                SpecialFunctions.CheckCondition(false, "Don't know what featurerizer to use for the model");
                featurerizerName   = null;
                SourceDataFileName = null;
                NameList           = null;
                break;
            }
            Converter <object, Set <IHashableFeature> > featurizer = FeatureLib.CreateFeaturizer(featurerizerName);

            //GeneratorType generatorType = GeneratorType.ComboAndZero6SuperType;
            //FeatureSerializer featureSerializer = PositiveNegativeExperimentCollection.GetFeatureSerializer();
            //KmerDefinition = kmerDefinition;
            //HlaResolution hlaResolution = HlaResolution.ABMixed;
            string resourceName = string.Format("maxentModel{0}{1}{2}{3}.xml", modelName.Split('.')[0], SampleNEC.N.Length, SampleNEC.E.Length, SampleNEC.C.Length);

            EpitopeLearningDataList = new List <EpitopeLearningDataDupHlaOK>();
            using (StreamReader streamReader = Predictor.OpenResource(resourceName))
            {
                Logistic = (Logistic)FeatureLib.FeatureSerializer.FromXmlStreamReader(streamReader);
                //Logistic.FeatureGenerator = EpitopeFeatureGenerator.GetInstance(KmerDefinition, generatorType, featureSerializer).GenerateFeatureSet;
                Logistic.FeatureGenerator = featurizer;   // reuse the featurizer created above rather than building a second one
                foreach (string name in NameList)
                {
                    EpitopeLearningData epitopeLearningDataX = EpitopeLearningData.GetDbWhole(HlaFactory, SampleNEC.E.Length, name, SourceDataFileName);
                    Debug.Assert(epitopeLearningDataX.Count > 0, "Expect the given data to contain at least one example");
                    //!!!combine with previous step
                    EpitopeLearningDataDupHlaOK epitopeLearningData = new EpitopeLearningDataDupHlaOK(epitopeLearningDataX.Name);
                    foreach (KeyValuePair <Pair <string, Hla>, bool> merAndHlaAndLabel in epitopeLearningDataX)
                    {
                        Hla hlaIn  = merAndHlaAndLabel.Key.Second;
                        Hla hlaOut = hlaForNormalization(hlaIn);

                        Dictionary <Hla, Dictionary <Hla, bool> > hla2ToHlaToLabel = SpecialFunctions.GetValueOrDefault(epitopeLearningData, merAndHlaAndLabel.Key.First);
                        Dictionary <Hla, bool> hlaToLabel = SpecialFunctions.GetValueOrDefault(hla2ToHlaToLabel, hlaOut);
                        hlaToLabel.Add(hlaIn, merAndHlaAndLabel.Value);
                    }

                    EpitopeLearningDataList.Add(epitopeLearningData);
                }
            }

            HlaForNormalization = hlaForNormalization;
        }
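        // Illustrative sketch only (not part of the original source): LoadUpPredictor above builds a
        // three-level map of mer -> normalized Hla -> original Hla -> label by asking
        // SpecialFunctions.GetValueOrDefault for the inner dictionary at each level. The helper below
        // shows the assumed "return the stored value, or insert and return a fresh one" behavior with
        // plain dictionaries; the real SpecialFunctions implementation may differ.
        private static TValue GetOrAddDefaultSketch<TKey, TValue>(Dictionary<TKey, TValue> dictionary, TKey key)
            where TValue : new()
        {
            TValue value;
            if (!dictionary.TryGetValue(key, out value))
            {
                value = new TValue();
                dictionary.Add(key, value);
            }
            return value;
        }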
        private Dictionary <Hla, Dictionary <int, int> > CreateHlaToLengthToLengthToSmoothedCount()
        {
            Dictionary <Hla, Dictionary <int, int> > hlaToLengthToLengthToSmoothedCount = new Dictionary <Hla, Dictionary <int, int> >();

            for (int k = (int)MerLength.firstLength; k <= (int)MerLength.lastLength; ++k)
            {
                Predictor predictor = KToPredictor[k];
                foreach (Pair <string, Hla> merAndHlaToLength in predictor.PositiveExampleEnumeration())
                {
                    Dictionary <int, int> lengthToSmoothedCount = SpecialFunctions.GetValueOrDefault(hlaToLengthToLengthToSmoothedCount, merAndHlaToLength.Second);
                    int length = merAndHlaToLength.First.Length;
                    //Debug.Assert(length == merAndHlaToLength.Mer.Length); // real assert
                    lengthToSmoothedCount[length] = 1 + SpecialFunctions.GetValueOrDefault(lengthToSmoothedCount, length, 1);
                }
            }
            return(hlaToLengthToLengthToSmoothedCount);
        }
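        // Worked example of the add-one smoothing above (illustrative; it assumes
        // SpecialFunctions.GetValueOrDefault(dict, key, 1) returns the stored count, or the default
        // of 1 when that length has not been seen for the HLA):
        //     1st positive example of a length for an HLA: 1 + 1 = 2
        //     2nd positive example of that length:         1 + 2 = 3
        //     nth positive example of that length:         n + 1
        // so every observed (hla, length) pair carries one extra pseudo-count.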
        ////!!! this could be moved into a class
        //private object CreateKey(Prediction prediction, Best display)
        //{
        //    switch (display)
        //    {
        //        case Best.overall:
        //            return "best";
        //        case Best.perHla:
        //            return prediction.Hla;
        //        case Best.perPrediction:
        //            return prediction;
        //        case  Best.perLength:
        //            return prediction.K;
        //        case Best.perHlaAndLength:
        //            return new Pair<Hla, int>(prediction.Hla, prediction.K);
        //        default:
        //            SpecialFunctions.CheckCondition(false, "Don't know how to display " + display.ToString());
        //            return null;
        //    }
        //}

        /// <summary>
        ///  HlaSetSpecification class choices:
        ///        HlaSetSpecification.Singleton - a single Hla will be given and it is the only HLA to be considered
        ///        HlaSetSpecification.Supertype - a supertype will be given and its HLAs should be considered
        ///        HlaSetSpecification.All - consider all known HLAs
        /// </summary>
        /// <param name="inputPeptide">A string of amino acids.</param>
        /// <param name="merLength">A value from the MerLength enum, which includes MerLength.scan, MerLength.given, MerLength.Eight, etc.</param>
        /// <param name="hlaSetSpecification">A predefined HlaSetSpecification class.</param>
        /// <param name="hlaOrSupertypeOrNull">The hla or supertype required by the HlaSetSpecification, or null for HlaSetSpecification.All.</param>
        /// <param name="modelOnly">If true, report the probability from the model even when the epitope is on a source list.</param>
        /// <returns>A lazy enumeration of Prediction objects, one for each peptide window and HLA considered.</returns>
        public IEnumerable <Prediction> PredictionEnumeration(string inputPeptide, MerLength merLength, int? dOfCenter, HlaSetSpecification hlaSetSpecification, string hlaOrSupertypeOrNull, bool modelOnly)
        {
            Set <Hla> hlaSet = HlaSet(hlaSetSpecification, hlaOrSupertypeOrNull);

            foreach (int eLength in KEnumeration(merLength, inputPeptide.Length))
            {
                Predictor predictor = KToPredictor[eLength];
                Dictionary <Hla, double> hlaToPriorLogOdds = KToHlaToPriorLogOdds[eLength];

                int necLength = NCLength + eLength + NCLength;
                foreach (int startIndex in StartIndexEnumeration(inputPeptide.Length, necLength, dOfCenter))
                {
                    string peptide = inputPeptide.Substring(startIndex, necLength);
                    NEC    nec     = NEC.GetInstance(peptide, NCLength, eLength, NCLength);
                    foreach (Hla hla in hlaSet)
                    {
                        Hla    hlaForNormalization = HlaForNormalization(hla);
                        double priorLogOddsOfThisLengthAndHla;
                        if (!hlaToPriorLogOdds.TryGetValue(hlaForNormalization, out priorLogOddsOfThisLengthAndHla))
                        {
                            SpecialFunctions.CheckCondition(!RaiseErrorIfNotFoundInNormalizationTable, string.Format("Hla '{0}' (which is '{1}' for the purposes of normalization) is not found in the normalization table", hla, hlaForNormalization));
                            priorLogOddsOfThisLengthAndHla = SpecialFunctions.LogOdds(RatioOfTrueToFalseTrainingExample);
                        }


                        string source;
                        double originalP       = predictor.Predict(nec, hla, modelOnly, out source);
                        double originalLogOdds = SpecialFunctions.LogOdds(originalP);

                        double     correctedLogOdds     = originalLogOdds + priorLogOddsOfThisLengthAndHla;
                        double     posteriorProbability = SpecialFunctions.InverseLogOdds(correctedLogOdds);
                        double     weightOfEvidence     = correctedLogOdds - SpecialFunctions.LogOdds(RatioOfTrueToFalseTrainingExample);
                        Prediction prediction           = Prediction.GetInstance(inputPeptide, hla, posteriorProbability, weightOfEvidence, nec, startIndex + NCLength + 1, startIndex + NCLength + eLength, source);
                        yield return(prediction);
                    }
                }
            }
        }
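        // Illustrative sketch (not part of the original source) of the prior correction applied in
        // PredictionEnumeration, assuming SpecialFunctions.LogOdds is the standard logit
        // ln(p / (1 - p)) and SpecialFunctions.InverseLogOdds is its inverse 1 / (1 + e^-x):
        private static double ApplyLengthAndHlaPriorSketch(double originalP, double priorLogOddsOfLengthAndHla)
        {
            double originalLogOdds  = System.Math.Log(originalP / (1.0 - originalP));  // logit of the raw model probability
            double correctedLogOdds = originalLogOdds + priorLogOddsOfLengthAndHla;    // shift by the length-and-HLA prior
            return 1.0 / (1.0 + System.Math.Exp(-correctedLogOdds));                   // back to a posterior probability
        }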
        public static PredictorCollection GetInstance(string modelName)
        {
            PredictorCollection bestPredictorCollection = new PredictorCollection();

            switch (modelName.ToLower())
            {
            case "lanliedb03062007":
                bestPredictorCollection.NCLength            = 0;
                bestPredictorCollection.HasBlanks           = SupertypeSpec.None; //!!!Might want to create supertype set later
                bestPredictorCollection.HlaForNormalization = Identity;
                bestPredictorCollection.RaiseErrorIfNotFoundInNormalizationTable = true;
                bestPredictorCollection.RatioOfTrueToFalseTrainingExample        = .1;
                break;

            default:
                SpecialFunctions.CheckCondition(false, "Don't know of model");
                bestPredictorCollection.NCLength            = int.MinValue;
                bestPredictorCollection.HasBlanks           = SupertypeSpec.ImpossibleValue;
                bestPredictorCollection.HlaForNormalization = null;
                bestPredictorCollection.RaiseErrorIfNotFoundInNormalizationTable = true;
                bestPredictorCollection.RatioOfTrueToFalseTrainingExample        = double.NaN;
                break;
            }

            bestPredictorCollection.KToPredictor = new Dictionary <int, Predictor>();

            for (int k = (int)MerLength.firstLength; k <= (int)MerLength.lastLength; ++k)
            {
                Predictor bestPredictor = Predictor.GetInstance(modelName, k, bestPredictorCollection.NCLength, bestPredictorCollection.HlaForNormalization);
                bestPredictorCollection.KToPredictor.Add(k, bestPredictor);
            }

            bestPredictorCollection.CreateKToHlaToPriorLogOdds();


            return(bestPredictorCollection);
        }
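An end-to-end usage sketch (illustrative only): the peptide is a hypothetical example, and anything not shown in the methods above is an assumption.

        PredictorCollection predictorCollection = PredictorCollection.GetInstance("lanlIedb03062007");
        foreach (Prediction prediction in predictorCollection.PredictionEnumeration(
            "SLYNTVATL",                 // hypothetical input peptide
            MerLength.scan,              // scan every supported epitope length
            null,                        // dOfCenter: no restriction on window position
            HlaSetSpecification.All,     // consider all known HLAs
            null,                        // no specific hla or supertype
            /*modelOnly*/ false))
        {
            // each Prediction carries the HLA, the posterior probability, and the weight of evidence
        }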