        // Exercises PURQBinaryIndexedTree against a naive alternative, interleaving
        // random point updates and range sum queries.
        public void PURQBinaryIndexedTree()
        {
            var rand = new Random();

            for (int a = 0; a < _sourceArrays.Length; ++a)
            {
                int[] sourceArray           = _sourceArrays[a];
                var   purqBinaryIndexedTree = new PURQBinaryIndexedTree(sourceArray);

                for (int r = 0; r < 1000; ++r)
                {
                    int firstIndex  = rand.Next(0, sourceArray.Length);
                    int secondIndex = rand.Next(0, sourceArray.Length);
                    int mode        = rand.Next(2);

                    if (mode == 0)
                    {
                        NaiveBinaryIndexedTreeAlternatives.PointUpdate(sourceArray, firstIndex, delta: r);
                        purqBinaryIndexedTree.PointUpdate(firstIndex, delta: r);
                    }
                    else
                    {
                        int startIndex = Math.Min(firstIndex, secondIndex);
                        int endIndex   = Math.Max(firstIndex, secondIndex);

                        int expected = NaiveBinaryIndexedTreeAlternatives.SumQuery(sourceArray, startIndex, endIndex);
                        Assert.AreEqual(expected, purqBinaryIndexedTree.SumQuery(startIndex, endIndex));
                    }
                }
            }
        }
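A note on the shared API: every example on this page drives the same PURQBinaryIndexedTree surface, namely construction from an array length or a source array, PointUpdate(index, delta), and an inclusive SumQuery(startIndex, endIndex). The following is a minimal compatible sketch (an assumption, not necessarily the repo's actual implementation), using 0-based public indices over the conventional 1-based Fenwick array:

    public class PURQBinaryIndexedTree
    {
        private readonly int[] _tree; // 1-based internal Fenwick array.

        public PURQBinaryIndexedTree(int arrayLength)
            => _tree = new int[arrayLength + 1];

        public PURQBinaryIndexedTree(int[] array)
            : this(array.Length)
        {
            for (int i = 0; i < array.Length; ++i)
            {
                PointUpdate(i, array[i]);
            }
        }

        // Adds delta to the value at updateIndex; O(log n).
        public void PointUpdate(int updateIndex, int delta)
        {
            for (int i = updateIndex + 1; i < _tree.Length; i += i & -i)
            {
                _tree[i] += delta;
            }
        }

        // Sums the values in the inclusive range [0, endIndex]; O(log n).
        private int SumQuery(int endIndex)
        {
            int sum = 0;
            for (int i = endIndex + 1; i > 0; i -= i & -i)
            {
                sum += _tree[i];
            }

            return sum;
        }

        // Sums the values in the inclusive range [startIndex, endIndex].
        public int SumQuery(int startIndex, int endIndex)
            => SumQuery(endIndex) - SumQuery(startIndex - 1);
    }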
Example #2
 public ORDERSET(HashSet<int> possibleValues)
 {
     _possibleValues  = possibleValues;
     _orderedValues   = possibleValues.OrderBy(v => v).ToArray();
     _valueIsInserted = new bool[_orderedValues.Length];
     _insertionBIT    = new PURQBinaryIndexedTree(_orderedValues.Length);
 }
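A hypothetical sketch of members this constructor's fields could support (the source shows only the constructor): the BIT keeps a 1 at the sorted position of each inserted value, so counts and ranks reduce to range sums over _insertionBIT.

 // Inserts a value (assumed to be one of the possible values), idempotently.
 public void Insert(int value)
 {
     int sortedIndex = Array.BinarySearch(_orderedValues, value);

     if (!_valueIsInserted[sortedIndex])
     {
         _valueIsInserted[sortedIndex] = true;
         _insertionBIT.PointUpdate(sortedIndex, delta: 1);
     }
 }

 // Counts inserted values strictly less than the given value.
 public int CountLessThan(int value)
 {
     int sortedIndex = Array.BinarySearch(_orderedValues, value);
     if (sortedIndex < 0)
     {
         sortedIndex = ~sortedIndex; // Index of the first value greater than 'value'.
     }

     return sortedIndex == 0 ? 0 : _insertionBIT.SumQuery(0, sortedIndex - 1);
 }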
Example #3
    public static int[] Solve(int[] sourceArray, DistinctCountQuery[] queries)
    {
        int[] queryResults = new int[queries.Length];

        // Queries are performed in phases, a phase for each of the sourceArray.Length possible
        // query end indices. The query start index doesn't matter, just the fact that all queries
        // in a phase share an end index. The phases will proceed in ascending order of query end
        // indices, which is why the query objects are sorted that way below. A PURQ BIT is queried
        // within phases and updated between them. For any given phase, the PURQ BIT is always in a
        // state such that it can only answer distinct count queries which have an end index equal
        // to the phase's end index. The BIT's underlying array has 0s and 1s, where a 1 at an index
        // means the value there is the latest occurrence of the value up to the phase's end index.
        // The BIT returns sums like normal, but with this construction the sums correspond to the
        // distinct count of values within the queried range. That's because for a given phase, all
        // queries extend up to the phase's end index. So for any value known to be within the queried
        // range, the latest occurrence of the value up to the phase's end index is definitely within
        // the range, and its underlying BIT value accounts for a single 1 added to the returned sum.
        // After a phase is complete, we advance to the next phase's end index, update the
        // BIT so that index has a 1 (it's last, so definitely the latest occurrence of its
        // value), and turn off the value's earlier occurrence if one was marked with a 1,
        // since it's no longer the latest.

        // Sort queries by ascending query end index.
        Array.Sort(queries, (q1, q2) => q1.QueryEndIndex.CompareTo(q2.QueryEndIndex));

        var latestOccurrenceBIT           = new PURQBinaryIndexedTree(sourceArray.Length);
        var valuesLatestOccurrenceIndices = new Dictionary<int, int>(sourceArray.Length);
        int queryIndex = 0;

        for (int phaseEndIndex = 0;
             phaseEndIndex < sourceArray.Length && queryIndex < queries.Length;
             ++phaseEndIndex)
        {
            int endValue = sourceArray[phaseEndIndex];
            int endValuesPreviousLatestOccurrenceIndex;
            if (valuesLatestOccurrenceIndices.TryGetValue(
                    endValue, out endValuesPreviousLatestOccurrenceIndex))
            {
                latestOccurrenceBIT.PointUpdate(endValuesPreviousLatestOccurrenceIndex, -1);
            }
            latestOccurrenceBIT.PointUpdate(phaseEndIndex, 1);
            valuesLatestOccurrenceIndices[endValue] = phaseEndIndex;

            DistinctCountQuery query;
            while (queryIndex < queries.Length &&
                   (query = queries[queryIndex]).QueryEndIndex == phaseEndIndex)
            {
                queryResults[query.ResultIndex] = latestOccurrenceBIT.SumQuery(
                    query.QueryStartIndex, phaseEndIndex);
                ++queryIndex;
            }
        }

        return queryResults;
    }
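A usage sketch for Solve; the DistinctCountQuery constructor below is assumed (only its QueryStartIndex, QueryEndIndex, and ResultIndex members appear in the source), with ResultIndex mapping each query to its slot in the output:

    int[] sourceArray = { 1, 1, 2, 1, 3 };
    var queries = new[]
    {
        new DistinctCountQuery(queryStartIndex: 0, queryEndIndex: 4, resultIndex: 0), // {1, 2, 3} => 3
        new DistinctCountQuery(queryStartIndex: 0, queryEndIndex: 1, resultIndex: 1), // {1} => 1
        new DistinctCountQuery(queryStartIndex: 2, queryEndIndex: 3, resultIndex: 2)  // {1, 2} => 2
    };

    int[] queryResults = Solve(sourceArray, queries); // [3, 1, 2]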
Example #4
        // Benchmark: range sum queries over random ranges.
        private void PURQSumQuery()
        {
            var purqBinaryIndexedTree = new PURQBinaryIndexedTree(_array);

            for (int i = 0; i < _randomRangesCount; ++i)
            {
                Tuple<int, int> range = _randomRanges[i];

                purqBinaryIndexedTree.SumQuery(range.Item1, range.Item2);
            }
        }
Example #5
        // Benchmark: range updates applied naively as per-index point updates.
        private void PURQRangeUpdate()
        {
            var purqBinaryIndexedTree = new PURQBinaryIndexedTree(_array);

            for (int i = 0; i < _randomRangesCount; ++i)
            {
                Tuple<int, int> range = _randomRanges[i];

                for (int j = range.Item1; j <= range.Item2; ++j)
                {
                    purqBinaryIndexedTree.PointUpdate(j, 1);
                }
            }
        }
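The loop above costs O(range length · log n) per range update. For comparison, here's a sketch (not part of the benchmarked code) of the standard difference-array trick, which performs any range update with just two point updates, at the cost of point values becoming prefix sums:

        private static void RangeUpdate(
            PURQBinaryIndexedTree differenceBIT, int arrayLength,
            int startIndex, int endIndex, int delta)
        {
            differenceBIT.PointUpdate(startIndex, delta);

            if (endIndex + 1 < arrayLength)
            {
                differenceBIT.PointUpdate(endIndex + 1, -delta);
            }
        }

        // With this scheme, the value at an index is the prefix sum of the differences.
        private static int ValueAt(PURQBinaryIndexedTree differenceBIT, int index)
            => differenceBIT.SumQuery(0, index);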
Example #6
    // Insertion sort looks like this: [sorted part]k[unsorted part]. When adding the
    // kth element to the sorted part, we swap it with however many larger elements
    // there are to its left. The number of larger elements to its left doesn't change
    // as those elements get sorted, so this problem is equivalent to INVCNT. The array
    // size is limited to 100k elements, but that could be 100k * (100k - 1) / 2
    // inversions, so we need to use long when counting. I tried using the same code as
    // INVCNT but got TLE. Hints pointed at BIT, and reviewing DQUERY led to the idea.

    // Element values in the array are limited to <= 1 million. Say we're at the ith
    // index in the array. We want to use the BIT to figure out how many values greater
    // than a[i] have already been seen. All we need to do is make sure we've incremented
    // the BIT for each a[j], j before i. Then we can do a range query from a[i]+1 to
    // the limit of a million. For example, say the array is [9 6 9 4 5 1 2]. The BIT
    // goes up to a million. By the time we get to the value 5, 9 has been incremented twice,
    // 6 has been incremented once, and 4 has been incremented once. We then sum from
    // 6 (one more than 5) to a million (the limit), and see that 5 is inverted with
    // 3 values to its left (9, 9 and 6, but not 4).
    // ...But what would we do if the limit were much higher? Self-balancing BST?
    public static long Solve(int[] array)
    {
        var elementBIT = new PURQBinaryIndexedTree(
            // Max value (1 million) corresponds to the max index => array length is one more than that.
            arrayLength: _elementLimit + 1);
        long inversionCount = 0;

        for (int i = 1; i < array.Length; ++i)
        {
            elementBIT.PointUpdate(array[i - 1], 1);
            inversionCount += elementBIT.SumQuery(array[i] + 1, _elementLimit);
        }

        return inversionCount;
    }
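Tracing the example from the comment: [9, 6, 9, 4, 5, 1, 2] has 17 inversions, with the per-iteration sums being 1, 0, 3, 3, 5, and 5.

    long inversionCount = Solve(new[] { 9, 6, 9, 4, 5, 1, 2 }); // 17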
Example #7
        // Benchmark: a mix of sum queries and naive range updates, chosen by the
        // parity of each range's start index.
        private void PURQRandomOperation()
        {
            var purqBinaryIndexedTree = new PURQBinaryIndexedTree(_array);

            for (int i = 0; i < _randomRangesCount; ++i)
            {
                Tuple<int, int> range = _randomRanges[i];

                if (range.Item1 % 2 == 0)
                {
                    purqBinaryIndexedTree.SumQuery(range.Item1, range.Item2);
                }
                else
                {
                    for (int j = range.Item1; j <= range.Item2; ++j)
                    {
                        purqBinaryIndexedTree.PointUpdate(j, 1);
                    }
                }
            }
        }
Example #8
    // This problem is similar to DQUERY, but requires less creativity I think; it
    // was easier to see how to use the BIT.
    public static int[] SolveOffline(int[] sourceArray, GreaterThanQuery[] queries)
    {
        int[] queryResults = new int[queries.Length];

        // Sort source array values by descending value, but remember their original index.
        var orderedSourceValues = sourceArray
                                  .Select((v, i) => new IndexedValue(v, i))
                                  .ToArray();

        Array.Sort(orderedSourceValues, (v1, v2) => v2.Value.CompareTo(v1.Value));

        // Sort queries by descending k (a query looks for everything in a range > k).
        Array.Sort(queries, (q1, q2) => q2.GreaterThanLowerLimit.CompareTo(q1.GreaterThanLowerLimit));

        int sourceValuesIndex = 0;
        // Set an index in this BIT to 1 once the source array value at that index is greater
        // than the limit being considered for our queries. Queries are ordered by descending
        // limit, so once an index is set to 1 it stays valid for all future queries (their
        // limits are even lower).
        var greaterThanLimitBIT = new PURQBinaryIndexedTree(sourceArray.Length);

        foreach (var query in queries)
        {
            while (sourceValuesIndex < sourceArray.Length &&
                   orderedSourceValues[sourceValuesIndex].Value > query.GreaterThanLowerLimit)
            {
                greaterThanLimitBIT.PointUpdate(
                    orderedSourceValues[sourceValuesIndex].SourceIndex, 1);
                ++sourceValuesIndex;
            }

            queryResults[query.ResultIndex] = greaterThanLimitBIT.SumQuery(
                query.QueryStartIndex, query.QueryEndIndex);
        }

        return queryResults;
    }
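A usage sketch for SolveOffline; as with DistinctCountQuery above, the GreaterThanQuery constructor is assumed (only the members used in the method appear in the source):

    int[] sourceArray = { 3, 1, 4, 1, 5 };
    var queries = new[]
    {
        // How many values in [0, 4] are > 2? {3, 4, 5} => 3.
        new GreaterThanQuery(queryStartIndex: 0, queryEndIndex: 4, greaterThanLowerLimit: 2, resultIndex: 0),
        // How many values in [1, 3] are > 1? {4} => 1.
        new GreaterThanQuery(queryStartIndex: 1, queryEndIndex: 3, greaterThanLowerLimit: 1, resultIndex: 1)
    };

    int[] queryResults = SolveOffline(sourceArray, queries); // [3, 1]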