/// <summary>
/// Builds one ProductMatch per common fragment m/z. Fragments already present in
/// AllFragments (exact theoMz equality) are copied; missing ones are reconstructed
/// by accumulating matching peaks across all PSMs, weighted by DicOfPsmFactor.
/// Fragments below 10% of the average normalized intensity are zeroed (not removed).
/// </summary>
/// <param name="dicOfCommonFragments">Common fragment m/z values (keys); the int values are unused here.</param>
/// <param name="dbOptions">Supplies the product mass tolerance used for peak matching.</param>
/// <returns>One ProductMatch per key that matched AllFragments, plus one reconstructed match per key that did not.</returns>
private List <ProductMatch> GetCombinedMatches(Dictionary <double, int> dicOfCommonFragments, DBOptions dbOptions)
{
    List <ProductMatch> matches = new List <ProductMatch>(dicOfCommonFragments.Count);
    foreach (double mz in dicOfCommonFragments.Keys)
    {
        bool found = false;
        // NOTE(review): exact floating-point equality — assumes theoMz values and the
        // dictionary keys come from the same computation, so bit-identical. Verify.
        foreach (ProductMatch match in AllFragments)
        {
            if (match.theoMz == mz)
            {
                matches.Add(new ProductMatch(match));
                found = true;
            }
        }
        if (!found)
        {
            // Fragment not characterized yet: rebuild it from the raw spectra.
            double sumPsmFactor = 0;
            ProductMatch newMatch = new ProductMatch();
            newMatch.theoMz = mz;
            newMatch.weight = 0;
            newMatch.obsIntensity = 0;
            newMatch.normalizedIntensity = 0;
            foreach (PeptideSpectrumMatch psm in Psms)
            {
                foreach (MsMsPeak peak in psm.Query.spectrum.Peaks)
                {
                    // Peak counts if within the configured product mass tolerance of mz.
                    if (Math.Abs(Proteomics.Utilities.Numerics.CalculateMassError(peak.MZ, mz, dbOptions.productMassTolerance.Units)) <= dbOptions.productMassTolerance.Value)
                    {
                        // Intensities are PSM-factor weighted; normalized intensity is
                        // scaled by precursor flux (intensity/ms * injection time).
                        newMatch.obsIntensity += peak.Intensity * DicOfPsmFactor[psm];
                        newMatch.normalizedIntensity += (peak.Intensity / (psm.Query.spectrum.PrecursorIntensityPerMilliSecond * psm.Query.spectrum.InjectionTime)) * DicOfPsmFactor[psm];
                        sumPsmFactor += DicOfPsmFactor[psm];
                        newMatch.weight++;
                    }
                }
            }
            if (newMatch.weight > 0)
            {
                // Weighted average over the contributing PSM factors.
                newMatch.obsIntensity /= sumPsmFactor;
                newMatch.normalizedIntensity /= sumPsmFactor;
            }
            // weight becomes (peak count * normalized intensity): zero-intensity
            // reconstructions end up with zero weight.
            newMatch.weight *= newMatch.normalizedIntensity;
            matches.Add(newMatch);
        }
    }
    // Average normalized intensity across all collected matches.
    double averageNormedIntensity = 0.0;
    foreach (ProductMatch match in matches)
    {
        averageNormedIntensity += match.normalizedIntensity;
    }
    if (matches.Count > 0)
    {
        averageNormedIntensity /= (double)matches.Count;
    }
    // Keep only the most intense fragments: zero out anything below 10% of the
    // average normalized intensity. (An earlier version used 5%, i.e. 0.05.)
    foreach (ProductMatch pm in matches)
    {
        if (pm.normalizedIntensity < averageNormedIntensity * 0.1)
        {
            pm.normalizedIntensity = 0;
            pm.obsIntensity = 0;
        }
    }
    return(matches);
}
/// <summary>
/// Creates a product match between two products. If either product already belongs
/// to a match group, the other product joins that group; if neither does, a new
/// group is created with the next free ProductMatchID.
/// </summary>
/// <param name="CorrespondingProductID">The product being matched against.</param>
/// <param name="ProductID">The product being matched.</param>
/// <param name="isMatched">Checkbox value; contains "on" when checked, null when unchecked.</param>
/// <returns>Success on creation; Failure for duplicate/self matches or on error.</returns>
public ActionResult Create(int CorrespondingProductID, int ProductID, string isMatched)
{
    using (var unit = GetUnitOfWork())
    {
        try
        {
            // HTML checkboxes post "on" when checked; absent (null) means unchecked.
            var Matched = isMatched != null && isMatched.Contains("on");
            var matchService = unit.Service <ProductMatch>();

            // BUG FIX: validate BEFORE persisting anything. The original ran these
            // checks after creating rows, so two brand-new products (both lookups
            // null, hence null == null) were saved and then reported as a failure
            // ("share the same identification").
            if (ProductID == CorrespondingProductID)
            {
                return(Failure("These products share the same identification"));
            }

            var pID = matchService.Get(i => i.ProductID == ProductID);
            var cpID = matchService.Get(i => i.ProductID == CorrespondingProductID);
            if (pID != null && cpID != null)
            {
                return(Failure("This match already exists"));
            }

            if (pID != null)
            {
                // This product already has a group: add the corresponding product to it.
                var correspondingMatch = new ProductMatch
                {
                    ProductMatchID = pID.ProductMatchID,
                    ProductID = CorrespondingProductID,
                    isMatched = Matched,
                    MatchStatus = 1
                };
                matchService.Create(correspondingMatch);
            }
            else if (cpID != null)
            {
                // The corresponding product already has a group: add this product to it.
                var productMatch = new ProductMatch
                {
                    ProductMatchID = cpID.ProductMatchID,
                    ProductID = ProductID,
                    isMatched = Matched,
                    MatchStatus = 1
                };
                matchService.Create(productMatch);
            }
            else
            {
                // Neither product is matched yet: start a new group. Enumerate
                // GetAll() once (the original enumerated it twice: Count() then Max()).
                var existing = matchService.GetAll();
                var matchID = existing.Any() ? existing.Max(x => x.ProductMatchID) + 1 : 1;
                matchService.Create(new ProductMatch
                {
                    ProductMatchID = matchID,
                    ProductID = ProductID,
                    isMatched = Matched,
                    MatchStatus = 1
                });
                matchService.Create(new ProductMatch
                {
                    ProductMatchID = matchID,
                    ProductID = CorrespondingProductID,
                    isMatched = Matched,
                    MatchStatus = 1
                });
            }
            unit.Save();
            return(Success("Product match successfully added"));
        }
        catch (Exception ex)
        {
            return(Failure(String.Format("Something went wrong while trying to add a product match, {0}", ex)));
        }
    }
}
/// <summary>
/// Creates or reuses the ProductMatch row for a product within a match group and,
/// while the row is still in status New, sets its percentage from the best direct
/// match plus boosts for vendor-part-number (VPN) and IceCat-sheet evidence.
/// </summary>
/// <param name="productID">Product the match row belongs to.</param>
/// <param name="matchGroup">All match candidates in this group (direct and calculated).</param>
/// <param name="productMatchID">Match-group identifier.</param>
/// <param name="repo">Repository new rows are added to.</param>
/// <returns>The match row, or null when the group has no direct matches or on error.</returns>
private ProductMatch NewProductMatch(int productID, List <ProductMatchResult> matchGroup, int productMatchID, IRepository <ProductMatch> repo)
{
    try
    {
        // Calculated vendor-part-number matches.
        var calculatedMatchesVPN = matchGroup.Where(c => c.MatchPercentage == partialVPNMatch).ToList();
        // IceCat sheet matches.
        var iceCatSheetMatches = matchGroup.Where(c => c.MatchPercentage == iceCatListMatch).ToList();

        // A group made up exclusively of calculated matches has no direct evidence: skip it.
        if ((iceCatSheetMatches.Count + calculatedMatchesVPN.Count) == matchGroup.Count())
        {
            return(null);
        }

        // Extra percentage earned from VPN overlap and/or IceCat presence.
        int addedPercentage = 0;
        if (calculatedMatchesVPN.Count > 0)
        {
            var vpnMatch = calculatedMatchesVPN.FirstOrDefault();
            // Compare the shorter VPN against the longer one.
            string baseVpn = vpnMatch.CVendorItemNumber.Length > vpnMatch.VendorItemNumber.Length
                ? vpnMatch.CVendorItemNumber : vpnMatch.VendorItemNumber;
            string shortenedVpn = vpnMatch.CVendorItemNumber.Length > vpnMatch.VendorItemNumber.Length
                ? vpnMatch.VendorItemNumber : vpnMatch.CVendorItemNumber;
            // Guard: both VPNs empty would divide by zero (the original threw and
            // bailed out via the catch block; skipping the boost is the intended result).
            if (baseVpn.Length > 0)
            {
                // BUG FIX: the original computed 100 / baseVpn.Length with INTEGER
                // division, truncating the per-character percentage (e.g. 100/12 -> 8
                // instead of 8.33) and deflating the overlap score below.
                double vpnPercentageMatch = (100.0 / baseVpn.Length) * shortenedVpn.Length;
                if (vpnPercentageMatch >= 80 && vpnPercentageMatch <= 100)
                {
                    addedPercentage += 25;
                }
                else if (vpnPercentageMatch >= 70)
                {
                    addedPercentage += 20;
                }
                else if (vpnPercentageMatch >= 60)
                {
                    addedPercentage += 15;
                }
            }
        }
        if (iceCatSheetMatches.Count > 0)
        {
            addedPercentage += iceCatListMatch;
        }

        // Base percentage comes from the best direct (non-calculated) match; the
        // all-calculated guard above ensures at least one such match exists.
        var match = matchGroup.Except(iceCatSheetMatches).Except(calculatedMatchesVPN)
            .OrderByDescending(c => c.MatchPercentage).FirstOrDefault();
        var matchPr = Math.Min(match.MatchPercentage + addedPercentage, 100);

        ProductMatch productMatch = productMatches.FirstOrDefault(x => x.ProductID == productID && x.ProductMatchID == productMatchID);
        if (productMatch == null)
        {
            productMatch = new ProductMatch
            {
                ProductID = productID,
                MatchStatus = (int)MatchStatuses.New,
                ProductMatchID = productMatchID
            };
            repo.Add(productMatch);
            productMatches.Add(productMatch);
        }
        // Only rows still in status New are (re)scored; reviewed rows keep their values.
        if (productMatch.MatchStatus == (int)MatchStatuses.New)
        {
            productMatch.MatchPercentage = matchPr;   // already capped at 100 above
            productMatch.CalculatedMatch = addedPercentage != 0;
            productMatch.isMatched = matchPr == 100;
        }
        return(productMatch);
    }
    catch (Exception e)
    {
        log.AuditError("Processing product matches failed", e, "Product matching");
    }
    return(null);
}
/// <summary>
/// Fits the observed spectrum (capacity) as a weighted combination of the
/// characterized precursors' fragment patterns via max-flow, returning one
/// SolvedResult per precursor (in the enumeration order of ratiosToFit).
/// </summary>
/// <param name="ratiosToFit">Precursors whose fragment patterns are candidate components.</param>
/// <param name="nbProductsToKeep">Index into each precursor's Fragments dictionary selecting which fragment set to use.</param>
/// <param name="precision">Integer scaling factor applied to normalized intensities for the flow solver.</param>
/// <param name="capacity">Observed MS/MS peaks to be explained.</param>
/// <param name="tolerance">Mass tolerance for matching fragments to peaks.</param>
/// <param name="returnType">0 = max flow, 1 = best flow, 2 = average of solutions.</param>
/// <param name="PrecursorIntensityInCTrap">Normalization divisor for observed peak intensities.</param>
/// <param name="overFlow">Out-style: always set to 0.</param>
/// <param name="underFlow">Out-style: set to the ComputeMaxFlow error.</param>
/// <param name="errorInPercent">Out-style: relative error reported by ComputeMaxFlow.</param>
/// <param name="ConSole">Console/logging sink passed through to the solver.</param>
/// <returns>Dictionary mapping each precursor to its solved contribution.</returns>
public static Dictionary <CharacterizedPrecursor, SolvedResult> SolveFromSpectrumBKP(IEnumerable <CharacterizedPrecursor> ratiosToFit, int nbProductsToKeep, long precision, IEnumerable <MsMsPeak> capacity, MassTolerance tolerance, int returnType,//0 for max flow, 1 for best flow, 2 for average
    double PrecursorIntensityInCTrap, ref double overFlow, ref double underFlow, ref double errorInPercent, IConSol ConSole)
{
    List <List <double> > solutions = new List <List <double> >();
    List <long> average = new List <long>();
    List <MsMsPeak> expandedCapacity = new List <MsMsPeak>();
    // NOTE(review): sumOfProducts is accumulated but never read afterwards — dead
    // computation, presumably left over from an earlier version.
    double sumOfProducts = 0;
    foreach (MsMsPeak peak in capacity)
    {
        // Normalize observed intensity by precursor intensity, then scale to the
        // solver's integer precision.
        double intensityNormed = peak.Intensity / PrecursorIntensityInCTrap;
        expandedCapacity.Add(new MsMsPeak(peak.MZ, intensityNormed * precision, peak.Charge));
        sumOfProducts += peak.Intensity;
    }
    // Build the per-precursor fragment lists, using normalizedIntensity as the
    // working intensity for each copied fragment.
    List <List <ProductMatch> > tmpRatiosToFit = new List <List <ProductMatch> >();
    //foreach (List<ProductMatch> list in ratiosToFit.Values)
    foreach (CharacterizedPrecursor prec in ratiosToFit)
    {
        List <ProductMatch> pms = new List <ProductMatch>();
        foreach (ProductMatch pm in prec.Fragments[nbProductsToKeep])
        {
            ProductMatch newPm = new ProductMatch(pm);
            newPm.obsIntensity = newPm.normalizedIntensity;// *PrecursorIntensityInCTrap;
            pms.Add(newPm);
        }
        tmpRatiosToFit.Add(pms);
    }
    // Solve: fills solutions / errorInPercent / average by ref.
    double error = ComputeMaxFlow(tmpRatiosToFit, expandedCapacity, tolerance, ref solutions, ref errorInPercent, ref average, ConSole);
    double sumOfIntensities = 0;
    foreach (MsMsPeak peak in expandedCapacity)
    {
        sumOfIntensities += peak.Intensity;
    }
    overFlow = 0;
    underFlow = error;
    // NOTE(review): assumes ComputeMaxFlow always yields at least two solutions
    // (solutions[1] for returnType == 1), and that returnType is in 0..2 — any
    // other value leaves result null and throws below at result[i]. Confirm callers.
    List <SolvedResult> result = null;
    switch (returnType)
    {
        case 0:
            result = GetResultList(solutions[0], precision, underFlow, sumOfIntensities);
            break;
        case 1:
            result = GetResultList(solutions[1], precision, underFlow, sumOfIntensities);
            break;
        case 2:
            // Average solution is stored as longs; widen to double for GetResultList.
            List <double> tmpAverage = new List <double>();
            foreach (double val in average)
            {
                tmpAverage.Add(val);
            }
            result = GetResultList(tmpAverage, precision, underFlow, sumOfIntensities);
            break;
    }
    // Pair results back to precursors positionally — relies on ratiosToFit
    // enumerating in the same order as when tmpRatiosToFit was built.
    Dictionary <CharacterizedPrecursor, SolvedResult> resultPerSample = new Dictionary <CharacterizedPrecursor, SolvedResult>();
    int i = 0;
    foreach (CharacterizedPrecursor key in ratiosToFit)
    {
        resultPerSample.Add(key, result[i]);
        i++;
    }
    return(resultPerSample);
}