private void AddHitForSiteAndNeighbors(int i, IBins<bool> x, int toExtendTo)
{
    x.AddHit(i);

    // Extend the hit to the left of the site, stopping at the start of the bins
    for (int j = 0; j <= toExtendTo; j++)
    {
        var binIndex = i - j;
        if (binIndex >= 0)
        {
            x.AddHit(binIndex);
        }
        else
        {
            break;
        }
    }

    // Extend the hit to the right of the site, stopping at the end of the bins
    for (int j = 0; j <= toExtendTo; j++)
    {
        var binIndex = i + j;
        if (binIndex < _binEvidence.NumBins)
        {
            x.AddHit(binIndex);
        }
        else
        {
            break;
        }
    }
}
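// A minimal sketch of the neighbor-extension behavior above, using a plain bool[] in place of
// IBins<bool>; the array size, site, and extension values are illustrative assumptions, not part
// of the class. Marking site 3 with toExtendTo = 2 on a 6-bin array sets bins 1 through 5,
// clamped at both edges of the bin range.
bool[] bins = new bool[6];
int site = 3, toExtendTo = 2;
for (int j = -toExtendTo; j <= toExtendTo; j++)
{
    var binIndex = site + j;
    if (binIndex >= 0 && binIndex < bins.Length)
    {
        bins[binIndex] = true;
    }
}
// bins is now { false, true, true, true, true, true }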
public BinConclusions(IBinEvidence binEvidence, bool collectDepth, bool trackDirectionalMess = false, bool trackMapqMess = false)
{
    _binEvidence = binEvidence;
    _collectDepth = collectDepth;

    var numBins = _binEvidence.NumBins;
    _isMessyEnough = new SparseGroupedBoolBins(numBins);
    _indelRegions = new SparseGroupedBoolBins(numBins);

    if (trackDirectionalMess)
    {
        _fwdMessyStatus = new SparseGroupedBoolBins(numBins);
        _revMessyStatus = new SparseGroupedBoolBins(numBins);
    }
    else
    {
        _fwdMessyStatus = new DummyBins<bool>();
        _revMessyStatus = new DummyBins<bool>();
    }

    if (trackMapqMess)
    {
        _mapqMessyStatus = new SparseGroupedBoolBins(numBins);
    }
    else
    {
        _mapqMessyStatus = new DummyBins<bool>();
    }

    //if (_avoidLikelySnvs)
    {
        _probableTrueSnvRegions = new SparseGroupedBoolBins(numBins, 10);
    }
}
public static void CheckBins(Dictionary<int, int> nonZeroBinValues, IBins<int> intBins)
{
    for (int i = 0; i < 100; i++)
    {
        if (!nonZeroBinValues.ContainsKey(i))
        {
            Assert.Equal(0, intBins.GetHit(i));
        }
    }

    foreach (var bin in nonZeroBinValues)
    {
        Assert.Equal(bin.Value, intBins.GetHit(bin.Key));
    }
}
public void Merge(IBins<int> otherBins, int binOffset, int startBinInOther, int endBinInOther)
{
    var startBinInThis = startBinInOther - binOffset;
    var endBinInThis = Math.Min(_numBins, endBinInOther - binOffset);

    for (int i = startBinInThis; i <= endBinInThis; i++)
    {
        var binIdInOtherBins = i + binOffset;
        // Note, this keeps checking even if we've gone past the range of the other guy
        var otherHit = otherBins.GetHit(binIdInOtherBins, false);
        if (otherHit > 0)
        {
            IncrementHit(i, otherHit);
        }
    }
}
/// <summary>
/// Increment the hits of this bin with the hits from otherBins. Assumes they are on the same scale,
/// and will not go past the current bins. If offset not provided, also assumes start positions are the same.
/// </summary>
/// <param name="otherBins"></param>
/// <param name="binOffset"></param>
public void Merge(IBins<T> otherBins, int binOffset, int startBinInOther, int endBinInOther)
{
    var defaultResult = ReturnDefault();
    var startBinInThis = startBinInOther - binOffset;
    var endBinInThis = Math.Min(_numBins, endBinInOther - binOffset);

    for (int i = startBinInThis; i <= endBinInThis; i++)
    {
        var binIndexInOther = i + binOffset;
        // Note, this keeps checking even if we've gone past the range of the other guy
        var otherHit = otherBins.GetHit(binIndexInOther);
        if (!otherHit.Equals(defaultResult))
        {
            MergeHits(i, otherHit);
        }
    }
}
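// Usage sketch for the offset arithmetic in Merge above; the numbers are assumed example values,
// not from the source. With binOffset = 10, local bin i lines up with bin i + 10 in otherBins,
// so the other-bin range [12, 20] lands on local bins [2, 10].
int binOffset = 10, startBinInOther = 12, endBinInOther = 20;
int startBinInThis = startBinInOther - binOffset; // 2
int endBinInThis = endBinInOther - binOffset;     // 10, further capped at _numBins inside Merge
// thisBins.Merge(otherBins, binOffset, startBinInOther, endBinInOther);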
public BinEvidence(int refId, bool collectDepth, int numBins, bool avoidLikelySnvs, int siteWidth, int regionStart, bool trackDirectionalMess = false, bool trackMapqMess = false)
{
    _refId = refId;
    _collectDepth = collectDepth;
    NumBins = numBins;
    _avoidLikelySnvs = avoidLikelySnvs;
    _siteWidth = siteWidth;
    _regionStart = regionStart;

    // The below constants are in place to set a balance between opening up too-large groups for truly sparse data,
    // and opening up too many small groups for globally sparse but locally dense data.
    // The values of the constants themselves are not really scientific and could be further honed.
    // There is no analytical consequence, just performance.
    const int groupSizeForCommonlyHitCategory = 500;
    const int groupSizeForSparselyHitCategory = 50;

    _messyHits = new SparseGroupedIntBins(NumBins, groupSizeForCommonlyHitCategory);
    _indelHits = new SparseGroupedIntBins(NumBins, groupSizeForSparselyHitCategory);

    if (trackDirectionalMess)
    {
        _fwdOnlyMessyHits = new SparseGroupedIntBins(NumBins, groupSizeForSparselyHitCategory);
        _revOnlyMessyHits = new SparseGroupedIntBins(NumBins, groupSizeForSparselyHitCategory);
    }
    else
    {
        _fwdOnlyMessyHits = new DummyBins<int>();
        _revOnlyMessyHits = new DummyBins<int>();
    }

    if (trackMapqMess)
    {
        _mapqMessyHits = new SparseGroupedIntBins(NumBins, groupSizeForSparselyHitCategory);
    }
    else
    {
        _mapqMessyHits = new DummyBins<int>();
    }

    AllHits = new DenseBins(NumBins);
    _singleMismatchHits = new SparseGroupedIntBins(NumBins, groupSizeForCommonlyHitCategory);
    //_singleMismatchHits = new DenseBins(NumBins); // TODO reinstate when we add single mismatch feature back

    StartPosition = regionStart;
}
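// A rough sketch of the group-size tradeoff noted above; exampleNumBins is an assumed value, and
// the ceiling division is only an approximation of how a grouped sparse bin structure would
// partition its range. Larger groups mean fewer group allocations for categories that are hit
// often, while smaller groups avoid opening wide, mostly-empty groups for rarely hit categories.
int exampleNumBins = 100_000;
int commonlyHitGroups = (exampleNumBins + 500 - 1) / 500; // 200 groups of 500 bins
int sparselyHitGroups = (exampleNumBins + 50 - 1) / 50;   // 2,000 groups of 50 bins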
public void Merge(IBins<T> otherBins, int binOffset, int startBinInOther, int endBinInOther)
{
    // Intentionally left empty: this implementation tracks nothing, so there is nothing to merge
}
public void ResetIndelRegions()
{
    _indelRegions = new SparseGroupedBoolBins(_binEvidence.NumBins);
}
public UsableBins(IBinConclusions binConclusions)
{
    _binConclusions = binConclusions;
    _numBins = binConclusions.NumBins;
    _sitesUsable = new SparseGroupedBoolBins(_numBins);
}