/// <summary>
/// Given a set of ranges calculated by <see cref="AllocWeights(long, long, int)"/>,
/// iterate through the weights.
/// The ranges are modified to keep the current iteration state.
/// </summary>
/// <returns>The next weight in the ranges, or 0xffffffff if there is none left.</returns>
public long NextWeight()
{
    // All ranges have been consumed.
    if (rangeIndex >= rangeCount)
    {
        return 0xffffffffL;
    }

    // Take the next weight from the current range.
    WeightRange range = ranges[rangeIndex];
    long weight = range.Start;
    if (--range.Count == 0)
    {
        // This range is finished; continue with the next one.
        ++rangeIndex;
    }
    else
    {
        // Advance the range's start to the weight that follows this one.
        range.Start = IncWeight(weight, range.Length);
        Debug.Assert(range.Start <= range.End);
    }
    return weight;
}
// Verifies that WeightRange.Equals compares two ranges as expected for each data row.
public void Equals_Should_Return_Desired_Results(double xFrom, double xTo, double yFrom, double yTo, bool expectedResult)
{
    // Arrange
    var first = new WeightRange(xFrom, xTo);
    var second = new WeightRange(yFrom, yTo);

    // Act
    var actualResult = first.Equals(second);

    // Assert
    Assert.Equal(expectedResult, actualResult);
}
/// <summary>
/// Lengthens the given range by one byte: the minimum trail byte is appended to
/// the start weight, the maximum trail byte to the end weight, and the count is
/// scaled by the number of possible trail bytes at the new length.
/// </summary>
private void LengthenRange(WeightRange range)
{
    int newLength = range.Length + 1;
    range.Start = SetWeightTrail(range.Start, newLength, minBytes[newLength]);
    range.End = SetWeightTrail(range.End, newLength, maxBytes[newLength]);
    range.Count *= CountBytes(newLength);
    range.Length = newLength;
}
/// <summary>
/// Returns one page of news entries: either all entries, or only those
/// matching the given terms, depending on <paramref name="showAllNews"/>.
/// </summary>
public IEnumerable <NewsEntryInfo> GetNewsEntries_Page(int moduleId, int portalId, WeightRange thematicRange, WeightRange structRange, bool showAllNews, IList <Term> includeTerms)
{
    return showAllNews
        ? GetAllNewsEntries(moduleId, portalId, thematicRange, structRange)
        : GetNewsEntriesByTerms(moduleId, portalId, thematicRange, structRange, includeTerms);
}
/// <summary>
/// Counts all news entries for the portal within the given weight ranges,
/// delegating the work to the "GetNewsEntries_Count" stored procedure.
/// </summary>
public int GetAllNewsEntries_Count(int portalId, DateTime?now, WeightRange thematicWeights, WeightRange structuralWeights)
{
    var spName = SpNamePrefix + "GetNewsEntries_Count";
    return NewsDataProvider.Instance.ExecuteSpScalar <int> (
        spName,
        portalId,
        now,
        thematicWeights.Min, thematicWeights.Max,
        structuralWeights.Min, structuralWeights.Max);
}
// Verifies that two WeightRange values produce matching (or non-matching)
// hash codes as expected for each data row.
public void GetHashCode_Should_Return_Desired_Results(double xFrom, double xTo, double yFrom, double yTo, bool expectedResult)
{
    // Arrange
    var first = new WeightRange(xFrom, xTo);
    var second = new WeightRange(yFrom, yTo);

    // Act
    bool hashesMatch = first.GetHashCode() == second.GetHashCode();

    // Assert
    Assert.Equal(expectedResult, hashesMatch);
}
/// <summary>
/// Loads all news entries for the portal within the given weight ranges from
/// the "GetNewsEntries" stored procedure, then attaches content items and
/// agent modules to each entry.
/// </summary>
protected IEnumerable <NewsEntryInfo> GetAllNewsEntriesInternal(int portalId, WeightRange thematicWeights, WeightRange structuralWeights)
{
    var entries = NewsDataProvider.Instance.GetObjects <NewsEntryInfo> (
        System.Data.CommandType.StoredProcedure,
        SpNamePrefix + "GetNewsEntries",
        portalId,
        thematicWeights.Min, thematicWeights.Max,
        structuralWeights.Min, structuralWeights.Max);

    return entries
        .WithContentItems()
        .WithAgentModules(NewsDataProvider.Instance.ModuleController)
        .Cast <NewsEntryInfo> ();
}
/// <summary>
/// Combines the hash codes of <c>WeightRange</c>, <c>Kilograms</c> and
/// <c>IsMetric</c> using the conventional 397 multiplier, unchecked so
/// intermediate overflow wraps instead of throwing.
/// </summary>
public override int GetHashCode()
{
    unchecked
    {
        // ReSharper disable NonReadonlyMemberInGetHashCode
        var hash = WeightRange.GetHashCode();
        hash = (hash * 397) ^ Kilograms.GetHashCode();
        hash = (hash * 397) ^ IsMetric.GetHashCode();
        // ReSharper restore NonReadonlyMemberInGetHashCode
        return hash;
    }
}
/// <summary>
/// Returns all news entries for the module, served from the DNN data cache.
/// The cache key is per-module; the cache time comes from the portal's news
/// configuration.
/// </summary>
public IEnumerable <NewsEntryInfo> GetAllNewsEntries(int moduleId, int portalId, WeightRange thematicWeights, WeightRange structuralWeights)
{
    var cacheArgs = new CacheItemArgs(
        NewsCacheKeyPrefix + "ModuleId=" + moduleId,
        NewsConfig.GetInstance(portalId).DataCacheTime,
        CacheItemPriority.Normal);

    return DataCache.GetCachedData <IEnumerable <NewsEntryInfo> > (
        cacheArgs,
        c => GetAllNewsEntriesInternal(portalId, thematicWeights, structuralWeights));
}
/// <summary>
/// Returns the first page of news entries and, via <paramref name="newsEntriesCount"/>,
/// the total number of entries available — either for all news or only for
/// entries matching the given terms, depending on <paramref name="showAllNews"/>.
/// </summary>
public IEnumerable <NewsEntryInfo> GetNewsEntries_FirstPage(int portalId, int pageSize, DateTime?now, WeightRange thematicRange, WeightRange structRange, bool showAllNews, IList <Term> includeTerms, out int newsEntriesCount)
{
    if (showAllNews)
    {
        // Unfiltered: count and fetch everything.
        newsEntriesCount = GetAllNewsEntries_Count(portalId, now, thematicRange, structRange);
        return GetAllNewsEntries_FirstPage(portalId, pageSize, now, thematicRange, structRange);
    }

    // Filtered by taxonomy terms.
    newsEntriesCount = GetNewsEntriesByTerms_Count(portalId, now, thematicRange, structRange, includeTerms);
    return GetNewsEntriesByTerms_FirstPage(portalId, pageSize, now, thematicRange, structRange, includeTerms);
}
/// <summary>
/// Loads the first page of news entries from the "GetNewsEntries_FirstPage"
/// stored procedure, then attaches content items and agent modules.
/// </summary>
public IEnumerable <NewsEntryInfo> GetAllNewsEntries_FirstPage(int portalId, int pageSize, DateTime?now, WeightRange thematicWeights, WeightRange structuralWeights)
{
    var firstPage = NewsDataProvider.Instance.GetObjectsFromSp <NewsEntryInfo> (
        SpNamePrefix + "GetNewsEntries_FirstPage",
        portalId,
        pageSize,
        now,
        thematicWeights.Min, thematicWeights.Max,
        structuralWeights.Min, structuralWeights.Max);

    return firstPage
        .WithContentItems()
        .WithAgentModules(NewsDataProvider.Instance.ModuleController)
        .Cast <NewsEntryInfo> ();
}
/// <summary>
/// Fetches news entries from the repository: all entries when
/// <paramref name="showAllNews"/> is set, otherwise only those matching
/// <paramref name="includeTerms"/>.
/// </summary>
protected IEnumerable <NewsEntryInfo> GetNewsEntries(int moduleId, int portalId, WeightRange thematicWeights, WeightRange structuralWeights, bool showAllNews, List <Term> includeTerms)
{
    var repository = NewsRepository.Instance;
    return showAllNews
        ? repository.GetNewsEntries(moduleId, portalId, thematicWeights, structuralWeights)
        : repository.GetNewsEntriesByTerms(moduleId, portalId, thematicWeights, structuralWeights, includeTerms);
}
/// <summary>
/// Counts news entries matching any of the given terms via the
/// "GetNewsEntriesByTerms_Count" stored procedure. An empty term list
/// short-circuits to zero without touching the database.
/// </summary>
public int GetNewsEntriesByTerms_Count(int portalId, DateTime?now, WeightRange thematicWeights, WeightRange structuralWeights, IList <Term> terms)
{
    Contract.Requires(terms != null);

    // No terms means no matches; skip the database round-trip.
    if (terms.Count == 0)
    {
        return 0;
    }

    return NewsDataProvider.Instance.ExecuteSpScalar <int> (
        SpNamePrefix + "GetNewsEntriesByTerms_Count",
        portalId,
        now,
        thematicWeights.Min, thematicWeights.Max,
        structuralWeights.Min, structuralWeights.Max,
        terms.Select(t => t.TermId).ToArray());
}
/// <summary>
/// Loads news entries matching any of the given terms via the
/// "GetNewsEntriesByTerms" stored procedure, then attaches content items and
/// agent modules. An empty term list returns an empty sequence without a
/// database call.
/// </summary>
protected IEnumerable <NewsEntryInfo> GetNewsEntriesByTermsInternal(int portalId, WeightRange thematicWeights, WeightRange structuralWeights, IList <Term> terms)
{
    Contract.Requires(terms != null);

    // Without terms there is nothing to match; avoid the database call entirely.
    if (terms.Count == 0)
    {
        return Enumerable.Empty <NewsEntryInfo> ();
    }

    var termIds = terms.Select(t => t.TermId).ToArray();
    return NewsDataProvider.Instance.GetObjects <NewsEntryInfo> (
            System.Data.CommandType.StoredProcedure,
            SpNamePrefix + "GetNewsEntriesByTerms",
            portalId,
            thematicWeights.Min, thematicWeights.Max,
            structuralWeights.Min, structuralWeights.Max,
            termIds)
        .WithContentItems()
        .WithAgentModules(NewsDataProvider.Instance.ModuleController)
        .Cast <NewsEntryInfo> ();
}
/// <summary>
/// Loads the first page of news entries matching any of the given terms via
/// the "GetNewsEntriesByTerms_FirstPage" stored procedure, then attaches
/// content items and agent modules. An empty term list returns an empty
/// sequence without a database call.
/// </summary>
protected IEnumerable <NewsEntryInfo> GetNewsEntriesByTerms_FirstPage(int portalId, int pageSize, DateTime?now, WeightRange thematicWeights, WeightRange structuralWeights, IList <Term> terms)
{
    Contract.Requires(terms != null);

    // Without terms there is nothing to match; avoid the database call entirely.
    if (terms.Count == 0)
    {
        return Enumerable.Empty <NewsEntryInfo> ();
    }

    var termIds = terms.Select(t => t.TermId).ToArray();
    return NewsDataProvider.Instance.GetObjectsFromSp <NewsEntryInfo> (
            SpNamePrefix + "GetNewsEntriesByTerms_FirstPage",
            portalId,
            pageSize,
            now,
            thematicWeights.Min, thematicWeights.Max,
            structuralWeights.Min, structuralWeights.Max,
            termIds)
        .WithContentItems()
        .WithAgentModules(NewsDataProvider.Instance.ModuleController)
        .Cast <NewsEntryInfo> ();
}
/// <summary>
/// Takes two CE weights and calculates the
/// possible ranges of weights between the two limits, excluding them.
/// For weights with up to 4 bytes there are up to 2*4-1=7 ranges.
/// </summary>
/// <param name="lowerLimit">Exclusive lower bound; must be non-zero.</param>
/// <param name="upperLimit">Exclusive upper bound; must be non-zero and greater than <paramref name="lowerLimit"/>.</param>
/// <returns>true if at least one non-empty range was collected into <c>ranges</c>/<c>rangeCount</c>.</returns>
private bool GetWeightRanges(long lowerLimit, long upperLimit)
{
    Debug.Assert(lowerLimit != 0);
    Debug.Assert(upperLimit != 0);
    /* get the lengths of the limits */
    int lowerLength = LengthOfWeight(lowerLimit);
    int upperLength = LengthOfWeight(upperLimit);
    // printf("length of lower limit 0x%08lx is %ld\n", lowerLimit, lowerLength);
    // printf("length of upper limit 0x%08lx is %ld\n", upperLimit, upperLength);
    Debug.Assert(lowerLength >= middleLength);
    // Permit upperLength<middleLength: The upper limit for secondaries is 0x10000.
    if (lowerLimit >= upperLimit)
    {
        // printf("error: no space between lower & upper limits\n");
        return(false);
    }
    /* check that neither is a prefix of the other */
    if (lowerLength < upperLength)
    {
        if (lowerLimit == TruncateWeight(upperLimit, lowerLength))
        {
            // printf("error: lower limit 0x%08lx is a prefix of upper limit 0x%08lx\n", lowerLimit, upperLimit);
            return(false);
        }
    }
    /* if the upper limit is a prefix of the lower limit then the earlier test lowerLimit>=upperLimit has caught it */
    WeightRange[] lower = new WeightRange[5]; /* [0] and [1] are not used - this simplifies indexing */
    WeightRange middle = new WeightRange();
    WeightRange[] upper = new WeightRange[5];
    /*
     * With the limit lengths of 1..4, there are up to 7 ranges for allocation:
     * range          minimum length
     * lower[4]       4
     * lower[3]       3
     * lower[2]       2
     * middle         1
     * upper[2]       2
     * upper[3]       3
     * upper[4]       4
     *
     * We are now going to calculate up to 7 ranges.
     * Some of them will typically overlap, so we will then have to merge and eliminate ranges.
     */
    // Walk the lower limit from its full length down toward middleLength,
    // collecting the range above the limit at each length.
    long weight = lowerLimit;
    for (int length = lowerLength; length > middleLength; --length)
    {
        int trail = GetWeightTrail(weight, length);
        if (trail < maxBytes[length])
        {
            lower[length] = new WeightRange();
            lower[length].Start = IncWeightTrail(weight, length);
            lower[length].End = SetWeightTrail(weight, length, maxBytes[length]);
            lower[length].Length = length;
            lower[length].Count = maxBytes[length] - trail;
        }
        weight = TruncateWeight(weight, length - 1);
    }
    if (weight < 0xff000000L)
    {
        middle.Start = IncWeightTrail(weight, middleLength);
    }
    else
    {
        // Prevent overflow for primary lead byte FF
        // which would yield a middle range starting at 0.
        middle.Start = 0xffffffffL; // no middle range
    }
    // Walk the upper limit the same way, collecting the range below it at each length.
    weight = upperLimit;
    for (int length = upperLength; length > middleLength; --length)
    {
        int trail = GetWeightTrail(weight, length);
        if (trail > minBytes[length])
        {
            upper[length] = new WeightRange();
            upper[length].Start = SetWeightTrail(weight, length, minBytes[length]);
            upper[length].End = DecWeightTrail(weight, length);
            upper[length].Length = length;
            upper[length].Count = trail - minBytes[length];
        }
        weight = TruncateWeight(weight, length - 1);
    }
    middle.End = DecWeightTrail(weight, middleLength);
    /* set the middle range */
    middle.Length = middleLength;
    if (middle.End >= middle.Start)
    {
        middle.Count = (int)((middle.End - middle.Start) >> (8 * (4 - middleLength))) + 1;
    }
    else
    {
        /* no middle range, eliminate overlaps */
        for (int length = 4; length > middleLength; --length)
        {
            if (lower[length] != null && upper[length] != null &&
                    lower[length].Count > 0 && upper[length].Count > 0)
            {
                // Note: The lowerEnd and upperStart weights are versions of
                // lowerLimit and upperLimit (which are lowerLimit<upperLimit),
                // truncated (still less-or-equal)
                // and then with their last bytes changed to the
                // maxByte (for lowerEnd) or minByte (for upperStart).
                long lowerEnd = lower[length].End;
                long upperStart = upper[length].Start;
                bool merged = false;
                if (lowerEnd > upperStart)
                {
                    // These two lower and upper ranges collide.
                    // Since lowerLimit<upperLimit and lowerEnd and upperStart
                    // are versions with only their last bytes modified
                    // (and following ones removed/reset to 0),
                    // lowerEnd>upperStart is only possible
                    // if the leading bytes are equal
                    // and lastByte(lowerEnd)>lastByte(upperStart).
                    Debug.Assert(TruncateWeight(lowerEnd, length - 1) ==
                            TruncateWeight(upperStart, length - 1));
                    // Intersect these two ranges.
                    lower[length].End = upper[length].End;
                    lower[length].Count =
                            GetWeightTrail(lower[length].End, length) -
                            GetWeightTrail(lower[length].Start, length) + 1;
                    // count might be <=0 in which case there is no room,
                    // and the range-collecting code below will ignore this range.
                    merged = true;
                }
                else if (lowerEnd == upperStart)
                {
                    // Not possible, unless minByte==maxByte which is not allowed.
                    Debug.Assert(minBytes[length] < maxBytes[length]);
                }
                else /* lowerEnd<upperStart */
                {
                    if (IncWeight(lowerEnd, length) == upperStart)
                    {
                        // Merge adjacent ranges.
                        lower[length].End = upper[length].End;
                        lower[length].Count += upper[length].Count; // might be >countBytes
                        merged = true;
                    }
                }
                if (merged)
                {
                    // Remove all shorter ranges.
                    // There was no room available for them between the ranges we just merged.
                    upper[length].Count = 0;
                    while (--length > middleLength)
                    {
                        lower[length] = upper[length] = null;
                    }
                    break;
                }
            }
        }
    }
    /* print ranges
     * for(int length=4; length>=2; --length) {
     *     if(lower[length].count>0) {
     *         printf("lower[%ld] .start=0x%08lx .end=0x%08lx .count=%ld\n", length, lower[length].start, lower[length].end, lower[length].count);
     *     }
     * }
     * if(middle.count>0) {
     *     printf("middle .start=0x%08lx .end=0x%08lx .count=%ld\n", middle.start, middle.end, middle.count);
     * }
     * for(int length=2; length<=4; ++length) {
     *     if(upper[length].count>0) {
     *         printf("upper[%ld] .start=0x%08lx .end=0x%08lx .count=%ld\n", length, upper[length].start, upper[length].end, upper[length].count);
     *     }
     * }
     */
    /* copy the ranges, shortest first, into the result array */
    rangeCount = 0;
    if (middle.Count > 0)
    {
        ranges[0] = middle;
        rangeCount = 1;
    }
    for (int length = middleLength + 1; length <= 4; ++length)
    {
        /* copy upper first so that later the middle range is more likely the first one to use */
        if (upper[length] != null && upper[length].Count > 0)
        {
            ranges[rangeCount++] = upper[length];
        }
        if (lower[length] != null && lower[length].Count > 0)
        {
            ranges[rangeCount++] = lower[length];
        }
    }
    return(rangeCount > 0);
}
/// <summary>
/// Attempts to allocate <paramref name="n"/> weights using only the ranges of
/// length <paramref name="minLength"/>, splitting one range and lengthening the
/// tail part by one byte when the minLength ranges alone are not enough.
/// </summary>
/// <returns>true if the allocation fits; <c>ranges</c>/<c>rangeCount</c> are rewritten on success.</returns>
private bool AllocWeightsInMinLengthRanges(int n, int minLength)
{
    // See if the minLength ranges have enough weights
    // when we split one and lengthen the following ones.
    int count = 0;
    int minLengthRangeCount;
    // ranges[] is assumed sorted so that all minLength ranges come first.
    for (minLengthRangeCount = 0;
            minLengthRangeCount < rangeCount &&
            ranges[minLengthRangeCount].Length == minLength;
            ++minLengthRangeCount)
    {
        count += ranges[minLengthRangeCount].Count;
    }
    int nextCountBytes = CountBytes(minLength + 1);
    // Even with every weight lengthened there would not be room for n weights.
    if (n > count * nextCountBytes)
    {
        return(false);
    }
    // Use the minLength ranges. Merge them, and then split again as necessary.
    long start = ranges[0].Start;
    long end = ranges[0].End;
    for (int i = 1; i < minLengthRangeCount; ++i)
    {
        if (ranges[i].Start < start)
        {
            start = ranges[i].Start;
        }
        if (ranges[i].End > end)
        {
            end = ranges[i].End;
        }
    }
    // Calculate how to split the range between minLength (count1) and minLength+1 (count2).
    // Goal:
    //   count1 + count2 * nextCountBytes = n
    //   count1 + count2 = count
    // These turn into
    //   (count - count2) + count2 * nextCountBytes = n
    // and then into the following count1 & count2 computations.
    int count2 = (n - count) / (nextCountBytes - 1); // number of weights to be lengthened
    int count1 = count - count2; // number of minLength weights
    if (count2 == 0 || (count1 + count2 * nextCountBytes) < n)
    {
        // round up
        ++count2;
        --count1;
        Debug.Assert((count1 + count2 * nextCountBytes) >= n);
    }
    ranges[0].Start = start;
    if (count1 == 0)
    {
        // Make one long range.
        ranges[0].End = end;
        ranges[0].Count = count;
        LengthenRange(ranges[0]);
        rangeCount = 1;
    }
    else
    {
        // Split the range, lengthen the second part.
        // printf("split the range number %ld (out of %ld minLength ranges) by %ld:%ld\n",
        //        splitRange, rangeCount, count1, count2);
        // Next start = start + count1. First end = 1 before that.
        ranges[0].End = IncWeightByOffset(start, minLength, count1 - 1);
        ranges[0].Count = count1;
        if (ranges[1] == null)
        {
            ranges[1] = new WeightRange();
        }
        ranges[1].Start = IncWeight(ranges[0].End, minLength);
        ranges[1].End = end;
        ranges[1].Length = minLength; // +1 when lengthened
        ranges[1].Count = count2; // *countBytes when lengthened
        LengthenRange(ranges[1]);
        rangeCount = 2;
    }
    return(true);
}
/// <summary>
/// Creates a <see cref="Weight"/> from the given range, with a null kilograms
/// value and the metric flag cleared.
/// </summary>
public static Weight FromRange(WeightRange weightRange) => new Weight(weightRange, null, false);