private void LoadUpPredictor(string modelName, int eLength, int ncLength, Converter<Hla, Hla> hlaForNormalization)
{
    //Load up the predictor
    string featurizerName;
    switch (modelName.ToLower())
    {
        //!!!would be better not to have multiple of these switch statements around - looks like a job for a Class
        case "lanliedb03062007":
            featurizerName = "[email protected]";
            SampleNEC = NEC.GetInstance("", new string(' ', eLength), "");
            HlaFactory = HlaFactory.GetFactory("MixedWithB15AndA68");
            SourceDataFileName = "lanlIedb03062007.pos.source.txt";
            NameList = new string[] { "LANL", "IEDB" };
            break;
        default:
            SpecialFunctions.CheckCondition(false, "Don't know what featurizer to use for the model");
            featurizerName = null;
            SourceDataFileName = null;
            NameList = null;
            break;
    }

    Converter<object, Set<IHashableFeature>> featurizer = FeatureLib.CreateFeaturizer(featurizerName);
    //GeneratorType generatorType = GeneratorType.ComboAndZero6SuperType;
    //FeatureSerializer featureSerializer = PositiveNegativeExperimentCollection.GetFeatureSerializer();
    //KmerDefinition = kmerDefinition;
    //HlaResolution hlaResolution = HlaResolution.ABMixed;

    string resourceName = string.Format("maxentModel{0}{1}{2}{3}.xml",
        modelName.Split('.')[0], SampleNEC.N.Length, SampleNEC.E.Length, SampleNEC.C.Length);

    EpitopeLearningDataList = new List<EpitopeLearningDataDupHlaOK>();
    using (StreamReader streamReader = Predictor.OpenResource(resourceName))
    {
        Logistic = (Logistic)FeatureLib.FeatureSerializer.FromXmlStreamReader(streamReader);
        //Logistic.FeatureGenerator = EpitopeFeatureGenerator.GetInstance(KmerDefinition, generatorType, featureSerializer).GenerateFeatureSet;
        Logistic.FeatureGenerator = FeatureLib.CreateFeaturizer(featurizerName);

        foreach (string name in NameList)
        {
            EpitopeLearningData epitopeLearningDataX = EpitopeLearningData.GetDbWhole(HlaFactory, SampleNEC.E.Length, name, SourceDataFileName);
            Debug.Assert(epitopeLearningDataX.Count > 0, "Expect given data to have some data"); //!!!combine with previous step
            EpitopeLearningDataDupHlaOK epitopeLearningData = new EpitopeLearningDataDupHlaOK(epitopeLearningDataX.Name);
            foreach (KeyValuePair<Pair<string, Hla>, bool> merAndHlaAndLabel in epitopeLearningDataX)
            {
                Hla hlaIn = merAndHlaAndLabel.Key.Second;
                Hla hlaOut = hlaForNormalization(hlaIn);
                Dictionary<Hla, Dictionary<Hla, bool>> hla2ToHlaToLabel = SpecialFunctions.GetValueOrDefault(epitopeLearningData, merAndHlaAndLabel.Key.First);
                Dictionary<Hla, bool> hlaToLabel = SpecialFunctions.GetValueOrDefault(hla2ToHlaToLabel, hlaOut);
                hlaToLabel.Add(hlaIn, merAndHlaAndLabel.Value);
            }
            EpitopeLearningDataList.Add(epitopeLearningData);
        }
    }
    HlaForNormalization = hlaForNormalization;
}
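// A minimal, hypothetical call site for LoadUpPredictor, shown for illustration only.
// Assumptions: this helper lives in the same class as LoadUpPredictor; an identity
// converter is acceptable when no HLA normalization is wanted; and the eLength/ncLength
// values (9-mer epitopes, flank length 1) are illustrative, not prescribed by the model.
private void ExampleLoadLanlIedbModel()
{
    Converter<Hla, Hla> identity = delegate(Hla hla) { return hla; };
    LoadUpPredictor("lanliedb03062007", /*eLength:*/ 9, /*ncLength:*/ 1, identity);
}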
//!!!very similar to other code
public static Dictionary<Pair<NEC, Hla>, bool> ReadTable(HlaFactory hlaFactory, string fileName, bool dedup)
{
    Dictionary<Pair<NEC, Hla>, bool> table = new Dictionary<Pair<NEC, Hla>, bool>();
    foreach (Dictionary<string, string> row in SpecialFunctions.TabFileTable(fileName, "N\tepitope\tC\thla\tlabel", false))
    {
        string n = row["N"];
        string epitope = row["epitope"];
        SpecialFunctions.CheckCondition(Biology.GetInstance().LegalPeptide(epitope), string.Format("Peptide, '{0}', contains illegal char.", epitope));
        string c = row["C"];
        NEC nec = NEC.GetInstance(n, epitope, c);
        Hla hla = hlaFactory.GetGroundInstance(row["hla"]);

        string labelString = row["label"];
        SpecialFunctions.CheckCondition(labelString == "0" || labelString == "1", "Expect label to be '0' or '1'");

        Pair<NEC, Hla> pair = new Pair<NEC, Hla>(nec, hla);
        bool labelAsBool = (labelString == "1");
        if (dedup && table.ContainsKey(pair))
        {
            SpecialFunctions.CheckCondition(table[pair] == labelAsBool, "The example " + pair.ToString() + " appears with contradictory labels.");
            continue;
        }
        table.Add(pair, labelAsBool);
    }
    return table;
}
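// A hypothetical usage sketch for ReadTable, shown for illustration only.
// Assumptions: the factory name reuses "MixedWithB15AndA68" from LoadUpPredictor;
// the file name is made up; and the file begins with the tab-delimited header
// "N\tepitope\tC\thla\tlabel" and uses 0/1 labels, as checked above.
private static void ExampleReadTableUsage()
{
    HlaFactory hlaFactory = HlaFactory.GetFactory("MixedWithB15AndA68");
    Dictionary<Pair<NEC, Hla>, bool> examples = ReadTable(hlaFactory, "trainingExamples.txt", /*dedup:*/ true);
    foreach (KeyValuePair<Pair<NEC, Hla>, bool> example in examples)
    {
        NEC nec = example.Key.First;     // N flank, epitope, C flank
        Hla hla = example.Key.Second;
        bool isEpitope = example.Value;  // label "1" maps to true
        Console.WriteLine("{0}\t{1}\t{2}", nec, hla, isEpitope);
    }
}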
////!!! this could be moved into a class
//private object CreateKey(Prediction prediction, Best display)
//{
//    switch (display)
//    {
//        case Best.overall:
//            return "best";
//        case Best.perHla:
//            return prediction.Hla;
//        case Best.perPrediction:
//            return prediction;
//        case Best.perLength:
//            return prediction.K;
//        case Best.perHlaAndLength:
//            return new Pair<Hla, int>(prediction.Hla, prediction.K);
//        default:
//            SpecialFunctions.CheckCondition(false, "Don't know how to display " + display.ToString());
//            return null;
//    }
//}

/// <summary>
/// HlaSetSpecification class choices:
///     HlaSetSpecification.Singleton - an Hla will be given and it is the only hla to be considered
///     HlaSetSpecification.Supertype - a supertype will be given and its hlas should be considered
///     HlaSetSpecification.All - consider all known hlas
/// </summary>
/// <param name="inputPeptide">A string of amino acids.</param>
/// <param name="merLength">A value from the MerLength enum, which includes MerLength.scan, MerLength.given, MerLength.Eight, etc.</param>
/// <param name="hlaSetSpecification">A predefined HlaSetSpecification class.</param>
/// <param name="hlaOrSupertypeOrNull">The hla or supertype required by hlaSetSpecification, or null for HlaSetSpecification.All.</param>
/// <param name="modelOnly">If true, report the probability from the model even when the epitope is on a source list.</param>
/// <returns>An enumeration of Prediction objects, one per epitope length, start position, and hla considered.</returns>
public IEnumerable<Prediction> PredictionEnumeration(string inputPeptide, MerLength merLength, int? dOfCenter, HlaSetSpecification hlaSetSpecification, string hlaOrSupertypeOrNull, bool modelOnly)
{
    Set<Hla> hlaSet = HlaSet(hlaSetSpecification, hlaOrSupertypeOrNull);
    foreach (int eLength in KEnumeration(merLength, inputPeptide.Length))
    {
        Predictor predictor = KToPredictor[eLength];
        Dictionary<Hla, double> hlaToPriorLogOdds = KToHlaToPriorLogOdds[eLength];
        int necLength = NCLength + eLength + NCLength;
        foreach (int startIndex in StartIndexEnumeration(inputPeptide.Length, necLength, dOfCenter))
        {
            string peptide = inputPeptide.Substring(startIndex, necLength);
            NEC nec = NEC.GetInstance(peptide, NCLength, eLength, NCLength);
            foreach (Hla hla in hlaSet)
            {
                Hla hlaForNormalization = HlaForNormalization(hla);
                double priorLogOddsOfThisLengthAndHla;
                if (!hlaToPriorLogOdds.TryGetValue(hlaForNormalization, out priorLogOddsOfThisLengthAndHla))
                {
                    SpecialFunctions.CheckCondition(!RaiseErrorIfNotFoundInNormalizationTable,
                        string.Format("Hla '{0}' (which is '{1}' for the purposes of normalization) is not found in the normalization table", hla, hlaForNormalization));
                    priorLogOddsOfThisLengthAndHla = SpecialFunctions.LogOdds(RatioOfTrueToFalseTrainingExample);
                }

                string source;
                double originalP = predictor.Predict(nec, hla, modelOnly, out source);
                double originalLogOdds = SpecialFunctions.LogOdds(originalP);
                double correctedLogOdds = originalLogOdds + priorLogOddsOfThisLengthAndHla;
                double posteriorProbability = SpecialFunctions.InverseLogOdds(correctedLogOdds);
                double weightOfEvidence = correctedLogOdds - SpecialFunctions.LogOdds(RatioOfTrueToFalseTrainingExample);

                Prediction prediction = Prediction.GetInstance(inputPeptide, hla, posteriorProbability, weightOfEvidence, nec,
                    startIndex + NCLength + 1, startIndex + NCLength + eLength, source);
                yield return prediction;
            }
        }
    }
}
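// A hypothetical usage sketch for PredictionEnumeration, shown for illustration only.
// Assumptions: this helper lives in the same class as PredictionEnumeration, the
// peptide string is arbitrary, and writing each Prediction to the console is just
// one way a caller might consume the lazily generated enumeration.
private void ExampleScanAllHlas()
{
    foreach (Prediction prediction in PredictionEnumeration(
        "SLYNTVATLYCVHQRIDV",       // inputPeptide: a string of amino acids
        MerLength.scan,             // try every supported epitope length
        null,                       // dOfCenter: no constraint on where the epitope sits
        HlaSetSpecification.All,    // consider all known hlas...
        null,                       // ...so no specific hla or supertype is supplied
        false))                     // modelOnly: allow source-list results to take precedence over the model
    {
        Console.WriteLine("{0}\t{1}", prediction.Hla, prediction);
    }
}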