Example #1
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in C#:
//ORIGINAL LINE: private void detectDuplicateInputIds(Radix radix, org.neo4j.unsafe.impl.batchimport.input.Collector collector, org.neo4j.helpers.progress.ProgressListener progress) throws InterruptedException
        private void DetectDuplicateInputIds(Radix radix, Collector collector, ProgressListener progress)
        {
            // We do this collision sort using ParallelSort which has the data cache and the tracker cache,
            // the tracker cache gets sorted, data cache stays intact. In the collision data case we actually
            // have one more layer in here so we have tracker cache pointing to collisionNodeIdCache
            // pointing to dataCache. This can be done using the ParallelSort.Comparator abstraction.
            //
            // The Comparator below takes into account dataIndex for each eId it's comparing so that an extra
            // comparison based on dataIndex is done if it's comparing two equal eIds. We do this so that
            // stretches of multiple equal eIds are sorted by dataIndex (i.e. node id) order,
            // to be able to write an efficient duplication scanning below and to have deterministic duplication reporting.
            Comparator duplicateComparator = new ComparatorAnonymousInnerClass(this);

            (new ParallelSort(radix, As5ByteLongArray(_collisionNodeIdCache), _numberOfCollisions - 1, _collisionTrackerCache, _processorsForParallelWork, progress, duplicateComparator)).run();

            // Here we have a populated C
            // We want to detect duplicate input ids within it
            long previousEid             = 0;
            int  previousGroupId         = -1;
            SameInputIdDetector detector = new SameInputIdDetector();

            progress.Started("DEDUPLICATE");
            for (int i = 0; i < _numberOfCollisions; i++)
            {
                long collisionIndex = _collisionTrackerCache.get(i);
                long nodeId         = _collisionNodeIdCache.get5ByteLong(collisionIndex, 0);
                long offset         = _collisionNodeIdCache.get6ByteLong(collisionIndex, 5);
                long eid            = _dataCache.get(nodeId);
                int  groupId        = GroupOf(nodeId);
                // collisions of same eId AND groupId are always together
                bool same = eid == previousEid && previousGroupId == groupId;
                if (!same)
                {
                    detector.Clear();
                }

                // Potential duplicate
                object inputId            = _collisionValues.get(offset);
                long   nonDuplicateNodeId = detector.Add(nodeId, inputId);
                if (nonDuplicateNodeId != -1)
                {   // Duplicate
                    collector.CollectDuplicateNode(inputId, nodeId, _groups.get(groupId).name());
                    _trackerCache.markAsDuplicate(nodeId);
                    UnmarkAsCollision(nonDuplicateNodeId);
                }

                previousEid     = eid;
                previousGroupId = groupId;
                progress.Add(1);
            }
            progress.Done();
        }
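
The body of ComparatorAnonymousInnerClass is not shown above. As a rough illustration of the ordering the comments describe (primary key eId, tie-broken by dataIndex so stretches of equal eIds come out in node-id order), here is a hypothetical, self-contained IComparer<long> over collision indexes. The lookup delegates are stand-ins for the cache reads and are not the real ParallelSort.Comparator interface.

using System;
using System.Collections.Generic;

// Illustrative only: compares two collision indexes by eId first and, when the
// eIds are equal, by dataIndex (node id), giving the deterministic order the
// duplicate scan relies on. The lookup delegates are hypothetical stand-ins.
internal sealed class EidThenDataIndexComparer : IComparer<long>
{
    private readonly Func<long, long> _eidOf;       // collision index -> encoded id (assumed lookup)
    private readonly Func<long, long> _dataIndexOf; // collision index -> node id / dataIndex (assumed lookup)

    internal EidThenDataIndexComparer(Func<long, long> eidOf, Func<long, long> dataIndexOf)
    {
        _eidOf = eidOf;
        _dataIndexOf = dataIndexOf;
    }

    public int Compare(long left, long right)
    {
        int byEid = _eidOf(left).CompareTo(_eidOf(right));
        if (byEid != 0)
        {
            return byEid;
        }
        // Equal eIds: fall back to dataIndex so equal eIds are ordered by node id.
        return _dataIndexOf(left).CompareTo(_dataIndexOf(right));
    }
}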
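
SameInputIdDetector is likewise only referenced here. Judging from its use in the loop (Clear() when the eId/group stretch changes, Add() returning the node id of an earlier node with the same input id, or -1), a minimal stand-in could look like this; the real class in the importer may differ.

using System.Collections.Generic;

// Hypothetical stand-in mirroring the Clear()/Add() contract used in the loop above.
internal sealed class SameInputIdDetectorSketch
{
    // Input ids and node ids seen within the current stretch of equal (eId, groupId).
    private readonly List<object> _inputIds = new List<object>();
    private readonly List<long> _nodeIds = new List<long>();

    internal void Clear()
    {
        _inputIds.Clear();
        _nodeIds.Clear();
    }

    // Returns the node id of a previously added node with an equal input id,
    // or -1 if this input id is new within the current stretch.
    internal long Add(long nodeId, object inputId)
    {
        for (int i = 0; i < _inputIds.Count; i++)
        {
            if (Equals(_inputIds[i], inputId))
            {
                return _nodeIds[i];
            }
        }
        _inputIds.Add(inputId);
        _nodeIds.Add(nodeId);
        return -1;
    }
}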
Example #2
            protected internal virtual void RenderNodes(StringBuilder @out)
            {
                // Copy the node renderables into a list and sort them with the
                // anonymous comparator so the output order is deterministic.
                List<Renderable> values = new List<Renderable>((ICollection<Renderable>)_nodes.Values);
                IComparer<Renderable> comp = new ComparatorAnonymousInnerClass(this);

                values.Sort(comp);

                // Append each renderable's output in sorted order.
                foreach (Renderable renderable in values)
                {
                    @out.Append(renderable.Render());
                }
            }
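
A minimal standalone sketch of the same sort-then-append shape, with a simplified Renderable stand-in (the names here are illustrative, not the real interfaces):

using System.Collections.Generic;
using System.Linq;
using System.Text;

internal interface IRenderableSketch
{
    string Render();
}

internal static class RenderSketch
{
    // Sorts the renderables with the given comparer, then appends each rendering
    // to one StringBuilder, mirroring the shape of RenderNodes above.
    internal static string RenderAll(IEnumerable<IRenderableSketch> nodes,
                                     IComparer<IRenderableSketch> order)
    {
        List<IRenderableSketch> values = nodes.ToList();
        values.Sort(order);

        StringBuilder @out = new StringBuilder();
        foreach (IRenderableSketch renderable in values)
        {
            @out.Append(renderable.Render());
        }
        return @out.ToString();
    }
}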