/// <summary>
/// Reorders the IDs in _list to follow the storage order of resources that have property
/// <paramref name="propID"/>. Matching IDs are compacted to the front of _list; entries
/// beyond destIndex are left as they were (not removed).
/// </summary>
/// <param name="propID">ID of the property whose resource ordering drives the sort.</param>
public void IndexedSort(int propID)
{
    // NOTE(review): lock(this) exposes the sync object to callers — a private gate object
    // would be safer, but external code may already rely on locking this instance.
    lock (this)
    {
        if (_list == null)
        {
            Instantiate();
        }
        // Snapshot current contents for O(1) membership checks below.
        IntHashSet hashSet = new IntHashSet();
        for (int i = 0; i < _list.Count; i++)
        {
            hashSet.Add(_list [i]);
        }
        int destIndex = 0;
        // Iterate resources in storage order; write back only IDs already present in _list.
        // NOTE(review): rs is not disposed here — confirm whether IResultSet requires disposal.
        IResultSet rs = MyPalStorage.Storage.SelectResourcesWithProp(propID);
        foreach (IRecord rec in rs)
        {
            int id = rec.GetIntValue(0);
            if (hashSet.Contains(id))
            {
                _list [destIndex++] = id;
            }
        }
    }
}
// Benchmark: fills the B-tree with queueSize keys, then performs 100000
// delete-oldest / insert-new cycles (FIFO via the queue) and prints the elapsed time.
public void InsertAndDeletePageBenchmark()
{
    IntHashSet numbers = new IntHashSet();
    const int queueSize = 4000;
    TestKey keyFactory = new TestKey();
    IBTree bTree = new /*BTree*/OmniaMeaBTree( _indexFileName, keyFactory );
    using( bTree )
    {
        bTree.Open( );
        Queue queue = new Queue( queueSize );
        // Seed phase: insert queueSize unique keys, remembering their insertion order.
        for ( int i = 0; i < queueSize; i++ )
        {
            TestKey testKey = new TestKey( GetUniqueRand( numbers ) );
            queue.Enqueue( testKey );
            bTree.InsertKey( testKey, (int)testKey.Key );
        }
        int time = System.Environment.TickCount;
        // Measured phase: steady-state churn — delete the oldest key, insert a fresh one.
        for ( int i = 0; i < 100000; i++ )
        {
            TestKey testKey = (TestKey)queue.Dequeue();
            bTree.DeleteKey( testKey, (int)testKey.Key );
            numbers.Remove( (int)testKey.Key );
            TestKey newKey = new TestKey( GetUniqueRand( numbers ) );
            queue.Enqueue( newKey );
            bTree.InsertKey( newKey, (int)newKey.Key );
        }
        time = System.Environment.TickCount - time;
        Console.WriteLine( " work took " + time.ToString() );
        bTree.Close();
    }
}
// Lua binding: constructs an IntHashSet from the Lua stack.
// One stack argument -> empty set; two -> the second argument is an IEnumerable<int>
// used to seed the set. On success pushes (true, instance) and returns 2.
static public int constructor(IntPtr l)
{
    try
    {
        int argCount = LuaDLL.lua_gettop(l);
        if (argCount == 1)
        {
            IntHashSet created = new IntHashSet();
            pushValue(l, true);
            pushValue(l, created);
            return 2;
        }
        if (argCount == 2)
        {
            System.Collections.Generic.IEnumerable <System.Int32> seed;
            checkType(l, 2, out seed);
            IntHashSet created = new IntHashSet(seed);
            pushValue(l, true);
            pushValue(l, created);
            return 2;
        }
        return error(l, "New object failed.");
    }
    catch (Exception e)
    {
        return error(l, e);
    }
}
// Finishes the Miranda import pass: closes the DB, runs filter rules and queues text
// indexing for every conversation updated during the pass, and records pass timing.
public override void EnumerationFinished()
{
    if (_db != null)
    {
        _db.Close();
    }
    if (_updatedConversations != null)
    {
        foreach (IntHashSet.Entry e in _updatedConversations)
        {
            // e.Key is a resource ID; the resource may have been deleted meanwhile.
            IResource res = Core.ResourceStore.TryLoadResource(e.Key);
            if (res != null)
            {
                Core.FilterEngine.ExecRules(StandardEvents.ResourceReceived, res);
                Core.TextIndexManager.QueryIndexing(res.Id);
            }
        }
        _updatedConversations = null;
    }
    long endTicks = DateTime.Now.Ticks;
    // Ticks are 100ns units; /10000 converts the delta to milliseconds.
    Trace.WriteLineIf(IniSettings.TraceImport, "Miranda import took " + (endTicks - _startTicks) / 10000 + " ms");
    // Only an uninterrupted, non-incremental pass marks full indexing as completed.
    if (!Interrupted && _indexStartDate == DateTime.MinValue)
    {
        IniSettings.FullIndexingCompleted = true;
        ExecuteInIdle = false;
    }
    _completed = true;
}
// Test: inserts 10000 unique keys, then verifies that SearchForRange returns the
// key/offset pairs in non-descending key order.
public void SearchForRangeOrderTest()
{
    IntHashSet numbers = new IntHashSet();
    TestKey keyFactory = new TestKey();
    IBTree bTree = new /*BTree*/OmniaMeaBTree( _indexFileName, keyFactory );
    using( bTree )
    {
        bTree.Open();
        TestKey testKey = new TestKey( 0 );
        for ( int i = 0; i < 10000; i++ )
        {
            testKey = new TestKey( GetUniqueRand( numbers ) );
            bTree.InsertKey( testKey, (int)testKey.Key );
        }
        ArrayList keys_offsets = new ArrayList();
        // testKey still holds the last inserted key and serves as the upper range bound.
        bTree.SearchForRange( new TestKey( 0 ), testKey, keys_offsets );
        // Every adjacent pair must be ordered; otherwise the range scan is broken.
        for( int j = 1; j < keys_offsets.Count; ++j )
        {
            KeyPair pair1 = (KeyPair) keys_offsets[ j - 1 ];
            KeyPair pair2 = (KeyPair) keys_offsets[ j ];
            if( pair1._key.CompareTo( pair2._key ) > 0 )
            {
                throw new Exception( "Invalid key order, j = " + j );
            }
        }
        bTree.Close();
    }
}
/// <summary>
/// Returns the list of handles of all available (live) files.
/// </summary>
/// <param name="idle">When true, the scan aborts as soon as the system stops being idle.</param>
/// <returns>Handles of clusters that start a live (non-deleted) file chain.</returns>
public IntArrayList GetAllFiles(bool idle)
{
    int handle;
    // Phase 1: walk the free-handle chain and remember every deleted file's handle.
    IntHashSet deletedFiles = new IntHashSet();
    handle = _stream.GetFirstFreeFileHandle();
    while (IsValidHandle(handle) && Core.State != CoreState.ShuttingDown && (!idle || Core.IsSystemIdle))
    {
        deletedFiles.Add(handle);
        ClusteredCachedStream.Cluster cluster = _stream.GetCluster(handle);
        handle = _stream.OffsetToHandle(cluster.NextOffset);
    }
    // Phase 2: scan all clusters; a chain head (no previous cluster) that is not in
    // the deleted set is a live file. Aborts early on shutdown or loss of idle state.
    IntArrayList result = new IntArrayList();
    for (long off = ClusteredCachedStream.BLOB_FILE_SYSTEM_HEADER_SIZE; off < _stream.Length;)
    {
        if (Core.State == CoreState.ShuttingDown || (idle && !Core.IsSystemIdle))
        {
            break;
        }
        handle = _stream.OffsetToHandle(off);
        ClusteredCachedStream.Cluster cluster = _stream.GetCluster(handle);
        if (cluster.PrevOffset == ClusteredCachedStream.NOT_SET && !deletedFiles.Contains(handle))
        {
            result.Add(handle);
        }
        off += cluster.Length;
        off += ClusteredCachedStream.CLUSTER_HEADER_SIZE;
    }
    return(result);
}
// Builds a predicate answering "may this property token be read?" for the given roles.
// When property-level authorization is disabled, every property is permitted.
internal virtual System.Func <int, bool> GetPropertyPermissions(ISet <string> roles, System.Func <string, int> tokenLookup)
{
    if (_propertyAuthorization)
    {
        // Union of the blacklisted property tokens across every role the user holds.
        MutableIntSet blackListed = new IntHashSet();
        foreach (string role in roles)
        {
            if (_roleToPropertyBlacklist.ContainsKey(role))
            {
                Debug.Assert(_roleToPropertyBlacklist[role] != null, "Blacklist has to contain properties");
                foreach (string propName in _roleToPropertyBlacklist[role])
                {
                    try
                    {
                        blackListed.add(tokenLookup(propName));
                    }
                    catch (Exception)
                    {
                        // Unresolvable property names are logged and skipped rather than failing setup.
                        _securityLog.error("Error in setting up property permissions, '" + propName + "' is not a valid property name.");
                    }
                }
            }
        }
        return(property => !blackListed.contains(property));
    }
    else
    {
        return(property => true);
    }
}
// Starts an import pass over a Miranda database. The whole pass is skipped when the
// DB file size and slack space are unchanged since the previous import.
public void ImportDB(IMirandaDB db)
{
    _updatedConversations = new IntHashSet();
    _db = db;
    if (_db.FileSize == _lastFileSize && _db.SlackSpace == _lastSlackSpace)
    {
        TraceImport("Skipping Miranda DB import because file size and slack space did not change");
        _contactEnumerator = null;   // nothing to enumerate this pass
        return;
    }
    _lastFileSize = _db.FileSize;
    _lastSlackSpace = db.SlackSpace;
    // Import the DB owner's own contact first.
    ImportContact(_db.UserContact, true);
    if (_selfContact == null)
    {
        _selfContact = Core.ContactManager.MySelf.Resource;
    }
    // for AIM, we cannot create the contact resource from
    // the account, so hook the contact to the account later
    foreach (IResource acct in _selfAccounts.Values)
    {
        if (!acct.HasLink(Props.MirandaAcct, _selfContact))
        {
            acct.AddLink(Props.MirandaAcct, _selfContact);
        }
    }
    // Remaining contacts are imported incrementally via the enumerator.
    _contactIndex = 0;
    _contactEnumerator = _db.Contacts.GetEnumerator();
}
// Consistency check of a node's property chain: verifies the first record has no
// predecessor, each record is in use, and no property key repeats along the chain.
private void CheckProperty(NodeRecord record, CheckerEngine <NodeRecord, Org.Neo4j.Consistency.report.ConsistencyReport_NodeConsistencyReport> engine, ICollection <PropertyRecord> props)
{
    if (!Record.NO_NEXT_PROPERTY.@is(record.NextProp))
    {
        PropertyRecord firstProp = props.GetEnumerator().next();
        if (!Record.NO_PREVIOUS_PROPERTY.@is(firstProp.PrevProp))
        {
            engine.Report().propertyNotFirstInChain(firstProp);
        }
        // Keys seen so far; add() returning false signals a duplicate key in the chain.
        MutableIntSet keys = new IntHashSet();
        foreach (PropertyRecord property in props)
        {
            if (!property.InUse())
            {
                engine.Report().propertyNotInUse(property);
            }
            else
            {
                foreach (int key in ChainCheck.keys(property))
                {
                    if (!keys.add(key))
                    {
                        engine.Report().propertyKeyNotUniqueInChain();
                    }
                }
            }
        }
    }
}
// Breadth-first collection of reply descendants: for every resource in 'source', adds
// the IDs of its not-yet-seen reply children to 'result', then recurses one level
// deeper with the newly discovered set. 'level' only tracks recursion depth.
private static void CollectResources(string resType, IntHashSet result, IntHashSet source, int level)
{
    // Children discovered at this level only.
    IntHashSet temp = new IntHashSet();
    foreach (IntHashSet.Entry e in source)
    {
        IResourceList children = Core.ResourceStore.LoadResource(e.Key).GetLinksTo(resType, Core.Props.Reply);
        for (int i = 0; i < children.Count; i++)
        {
            int chid = children[i].Id;
            // Skip anything already visited to terminate on cyclic reply links.
            if (!source.Contains(chid) && !result.Contains(chid))
            {
                temp.Add(chid);
            }
        }
    }
    foreach (IntHashSet.Entry e in temp)
    {
        result.Add(e.Key);
    }
    if (temp.Count > 0)
    {
        CollectResources(resType, result, temp, level + 1);
    }
}
// Scans every relationship, recording for each (type, propertyKey) pair the value
// types seen, and maintaining per-type property-key sets. Keys absent from a
// previously seen relationship of the same type are marked nullable.
private void ScanEverythingBelongingToRelationships(RelationshipMappings relMappings)
{
    using (RelationshipScanCursor relationshipScanCursor = _cursors.allocateRelationshipScanCursor(), PropertyCursor propertyCursor = _cursors.allocatePropertyCursor())
    {
        _dataRead.allRelationshipsScan(relationshipScanCursor);
        while (relationshipScanCursor.Next())
        {
            int typeId = relationshipScanCursor.Type();
            relationshipScanCursor.Properties(propertyCursor);
            // Property keys present on this particular relationship.
            MutableIntSet propertyIds = IntSets.mutable.empty();
            while (propertyCursor.Next())
            {
                int propertyKey = propertyCursor.PropertyKey();
                Value currentValue = propertyCursor.PropertyValue();
                Pair <int, int> key = Pair.of(typeId, propertyKey);
                UpdateValueTypeInMapping(currentValue, key, relMappings.RelationshipTypeIdANDPropertyTypeIdToValueType);
                propertyIds.add(propertyKey);
            }
            propertyCursor.Close();
            MutableIntSet oldPropertyKeySet = relMappings.RelationshipTypeIdToPropertyKeys.getOrDefault(typeId, _emptyPropertyIdSet);
            // find out which old properties we did not visited and mark them as nullable
            if (oldPropertyKeySet == _emptyPropertyIdSet)
            {
                // First relationship seen with this type.
                if (propertyIds.size() == 0)
                {
                    // Even if we find property key on other rels with this type, set all of them nullable
                    relMappings.NullableRelationshipTypes.Add(typeId);
                }
                propertyIds.addAll(oldPropertyKeySet);
            }
            else
            {
                // Set-difference dance: keys unique to this relationship stay in propertyIds,
                // keys missing from it are marked nullable, then both halves are merged back.
                MutableIntSet currentPropertyIdsHelperSet = new IntHashSet(propertyIds.size());
                currentPropertyIdsHelperSet.addAll(propertyIds);
                propertyIds.removeAll(oldPropertyKeySet); // only the brand new ones in propIds now
                oldPropertyKeySet.removeAll(currentPropertyIdsHelperSet); // only the old ones that are not on the new rel
                propertyIds.addAll(oldPropertyKeySet);
                propertyIds.forEach(id =>
                {
                    Pair <int, int> key = Pair.of(typeId, id);
                    relMappings.RelationshipTypeIdANDPropertyTypeIdToValueType[key].setNullable();
                });
                propertyIds.addAll(currentPropertyIdsHelperSet);
            }
            relMappings.RelationshipTypeIdToPropertyKeys[typeId] = propertyIds;
        }
        relationshipScanCursor.Close();
    }
}
// Scans every node, recording for each (label set, propertyKey) pair the value types
// seen, and maintaining per-label-set property-key sets. Keys absent from a previously
// seen node with the same labels are marked nullable.
private void ScanEverythingBelongingToNodes(NodeMappings nodeMappings)
{
    using (NodeCursor nodeCursor = _cursors.allocateNodeCursor(), PropertyCursor propertyCursor = _cursors.allocatePropertyCursor())
    {
        _dataRead.allNodesScan(nodeCursor);
        while (nodeCursor.Next())
        {
            // each node
            SortedLabels labels = SortedLabels.From(nodeCursor.Labels());
            nodeCursor.Properties(propertyCursor);
            // Property keys present on this particular node.
            MutableIntSet propertyIds = IntSets.mutable.empty();
            while (propertyCursor.Next())
            {
                Value currentValue = propertyCursor.PropertyValue();
                int propertyKeyId = propertyCursor.PropertyKey();
                Pair <SortedLabels, int> key = Pair.of(labels, propertyKeyId);
                UpdateValueTypeInMapping(currentValue, key, nodeMappings.LabelSetANDNodePropertyKeyIdToValueType);
                propertyIds.add(propertyKeyId);
            }
            propertyCursor.Close();
            MutableIntSet oldPropertyKeySet = nodeMappings.LabelSetToPropertyKeys.getOrDefault(labels, _emptyPropertyIdSet);
            // find out which old properties we did not visited and mark them as nullable
            if (oldPropertyKeySet == _emptyPropertyIdSet)
            {
                // First node seen with this label set.
                if (propertyIds.size() == 0)
                {
                    // Even if we find property key on other nodes with those labels, set all of them nullable
                    nodeMappings.NullableLabelSets.Add(labels);
                }
                propertyIds.addAll(oldPropertyKeySet);
            }
            else
            {
                // Set-difference dance: keys unique to this node stay in propertyIds,
                // keys missing from it are marked nullable, then both halves are merged back.
                MutableIntSet currentPropertyIdsHelperSet = new IntHashSet(propertyIds.size());
                currentPropertyIdsHelperSet.addAll(propertyIds);
                propertyIds.removeAll(oldPropertyKeySet); // only the brand new ones in propIds now
                oldPropertyKeySet.removeAll(currentPropertyIdsHelperSet); // only the old ones that are not on the new node
                propertyIds.addAll(oldPropertyKeySet);
                propertyIds.forEach(id =>
                {
                    Pair <SortedLabels, int> key = Pair.of(labels, id);
                    nodeMappings.LabelSetANDNodePropertyKeyIdToValueType[key].setNullable();
                });
                propertyIds.addAll(currentPropertyIdsHelperSet);
            }
            nodeMappings.LabelSetToPropertyKeys[labels] = propertyIds;
        }
        nodeCursor.Close();
    }
}
/// <summary>
/// Replaces the current ID set with the IDs of all resources in the given list.
/// </summary>
/// <param name="resources">Resource list whose IDs populate the set.</param>
public void AddResources(IResourceList resources)
{
    IResourceIdCollection resourceIds = resources.ResourceIds;
    // Initial capacity of half the count matches the original sizing heuristic.
    _ids = new IntHashSet(resourceIds.Count / 2);
    foreach (int resourceId in resourceIds)
    {
        _ids.Add(resourceId);
    }
}
/// <summary>
/// Recursively adds the ID of <paramref name="root"/> (unless it is the tree root)
/// and of every node below it to <paramref name="nodes"/>.
/// </summary>
private void CollectAllSubNodes(IResource root, IntHashSet nodes)
{
    bool isTreeRoot = (root == _root);
    if (!isTreeRoot)
    {
        nodes.Add(root.Id);
    }
    foreach (IResource subNode in BookmarkService.SubNodes(null, root))
    {
        CollectAllSubNodes(subNode, nodes);
    }
}
/// <summary>
/// Collects the type IDs of the entries in the half-open index range
/// [startingFromType, upToType) into a set.
/// </summary>
public virtual IntSet Types(int startingFromType, int upToType)
{
    // Capacity hint of twice the range size, matching the original sizing.
    MutableIntSet typeIds = new IntHashSet((upToType - startingFromType) * 2);
    int index = startingFromType;
    while (index < upToType)
    {
        typeIds.add(Get(index).TypeId);
        index++;
    }
    return typeIds;
}
// Lua binding: removes all elements from the target IntHashSet. Pushes true on success.
static public int Clear(IntPtr l)
{
    try
    {
        IntHashSet target = (IntHashSet)checkSelf(l);
        target.Clear();
        pushValue(l, true);
        return 1;
    }
    catch (Exception e)
    {
        return error(l, e);
    }
}
// Lua binding: property getter — pushes the Comparer of the target IntHashSet.
static public int get_Comparer(IntPtr l)
{
    try
    {
        IntHashSet target = (IntHashSet)checkSelf(l);
        pushValue(l, true);
        pushValue(l, target.Comparer);
        return 2;
    }
    catch (Exception e)
    {
        return error(l, e);
    }
}
// Returns a key guaranteed not to have been handed out before and records it in
// 'numbers'. The randomized implementation is commented out; the current version
// simply returns sequential values of the static counter _i (faster, reproducible).
protected static int GetUniqueRand( IntHashSet numbers )
{
    /*int rand = _random.Next( ); while ( numbers.Contains( rand ) ) { rand = _random.Next( ); } numbers.Add( rand ); return rand;**/
    numbers.Add( ++_i );
    return _i;
}
// Test: fills the B-tree, reopens it, then repeatedly verifies that a key can be
// found, disappears after deletion, and that a freshly inserted key is found again.
public void SequentialSearchDeleteInsert()
{
    IntHashSet numbers = new IntHashSet();
    const int queueSize = 100000;
    TestKey keyFactory = new TestKey();
    IBTree bTree = new /*BTree*/OmniaMeaBTree( _indexFileName, keyFactory );
    using( bTree )
    {
        bTree.Open( );
        //Random random = new Random( System.Environment.TickCount );
        Queue queue = new Queue( queueSize );
        for ( int i = 0; i < queueSize; i++ )
        {
            int key = GetUniqueRand( numbers );
            TestKey testKey = new TestKey( key );
            queue.Enqueue( testKey );
            bTree.InsertKey( testKey, key );
        }
        // Close and reopen so the search/delete cycle runs against persisted state.
        bTree.Close();
        if( !bTree.Open() )
        {
            throw new Exception( "Can't reopen btree! ");
        }
        int time = System.Environment.TickCount;
        IntArrayList offsets = new IntArrayList();
        for ( int i = 0; i < 20000; i++ )
        {
            TestKey testKey = (TestKey)queue.Dequeue();
            // The key inserted earlier must be found exactly once, at its own offset.
            offsets.Clear();
            bTree.SearchForRange( testKey, testKey, offsets );
            Assert.AreEqual( 1, offsets.Count, testKey.Key.ToString() + " not found. i = " + i.ToString() );
            Assert.AreEqual( (int)testKey.Key, offsets[0] );
            bTree.DeleteKey( testKey, (int)testKey.Key );
            numbers.Remove( (int)testKey.Key );
            // After deletion the key must no longer be found.
            offsets.Clear();
            bTree.SearchForRange( testKey, testKey, offsets );
            Assert.AreEqual( 0, offsets.Count );
            // A brand-new key must be searchable immediately after insertion.
            TestKey newKey = new TestKey( GetUniqueRand( numbers ) );
            queue.Enqueue( newKey );
            bTree.InsertKey( newKey, (int)newKey.Key );
            offsets.Clear();
            bTree.SearchForRange( newKey, newKey, offsets );
            Assert.AreEqual( 1, offsets.Count );
            Assert.AreEqual( (int)newKey.Key, offsets[0] );
        }
        time = System.Environment.TickCount - time;
        Console.WriteLine( " work took " + time.ToString() );
        bTree.Close();
    }
}
// Lua binding: merges the elements of an IEnumerable<int> argument into the target set.
static public int UnionWith(IntPtr l)
{
    try
    {
        IntHashSet target = (IntHashSet)checkSelf(l);
        System.Collections.Generic.IEnumerable <System.Int32> source;
        checkType(l, 2, out source);
        target.UnionWith(source);
        pushValue(l, true);
        return 1;
    }
    catch (Exception e)
    {
        return error(l, e);
    }
}
// Lua binding: forwards OnDeserialization(sender) to the target set.
static public int OnDeserialization(IntPtr l)
{
    try
    {
        IntHashSet target = (IntHashSet)checkSelf(l);
        System.Object sender;
        checkType(l, 2, out sender);
        target.OnDeserialization(sender);
        pushValue(l, true);
        return 1;
    }
    catch (Exception e)
    {
        return error(l, e);
    }
}
// Stress test: seeds the B-tree with initialSize keys, then on each iteration deletes
// one existing key and inserts two new ones (net growth of one key per pass),
// periodically cross-checking GetAllKeys against the expected count and the shadow set.
private void DoStressProcessing( IBTree bTree )
{
    const int initialSize = 500000;
    const int iterations = 1000000;
    // Shadow copy of the keys expected to be in the tree.
    IntHashSet uniqueKeys = new IntHashSet();
    IntArrayList array = new IntArrayList();
    using( bTree )
    {
        bTree.Open();
        for( int i = 0; i < initialSize; ++i )
        {
            bTree.InsertKey( new TestKey( GetUniqueRand( uniqueKeys ) ), 0 );
        }
        for( int i = 0; i < iterations; ++i )
        {
            // Pick an arbitrary existing key: the first one the set enumerates.
            int key = 0;
            foreach( IntHashSet.Entry e in uniqueKeys )
            {
                key = e.Key;
                break;
            }
            array.Clear();
            bTree.SearchForRange( new TestKey( key ), new TestKey( key ), array );
            Assert.AreEqual( 1, array.Count );
            bTree.DeleteKey( new TestKey( key ), 0 );
            uniqueKeys.Remove( key );
            if( ( i & 31 ) == 5 )
            {
                // After the delete, the tree should hold initialSize + i - 1 keys.
                array.Clear();
                bTree.GetAllKeys( array );
                Assert.AreEqual( initialSize + i - 1, array.Count );
                Assert.AreEqual( uniqueKeys.Count, array.Count );
            }
            bTree.InsertKey( new TestKey( GetUniqueRand( uniqueKeys ) ), 0 );
            bTree.InsertKey( new TestKey( GetUniqueRand( uniqueKeys ) ), 0 );
            if( ( i & 31 ) == 17 )
            {
                // After the two inserts, the tree should hold initialSize + i + 1 keys.
                array.Clear();
                bTree.GetAllKeys( array );
                Assert.AreEqual( initialSize + i + 1, array.Count );
                Assert.AreEqual( uniqueKeys.Count, array.Count );
            }
            Trace.WriteLine( "Passes: " + i );
        }
        bTree.Close();
    }
}
// Lua binding: removes all elements matching a predicate; pushes the call's result.
static public int RemoveWhere(IntPtr l)
{
    try
    {
        IntHashSet target = (IntHashSet)checkSelf(l);
        System.Predicate <System.Int32> predicate;
        LuaDelegation.checkDelegate(l, 2, out predicate);
        var removed = target.RemoveWhere(predicate);
        pushValue(l, true);
        pushValue(l, removed);
        return 2;
    }
    catch (Exception e)
    {
        return error(l, e);
    }
}
// Lua binding: tests whether the target set is a proper superset of the given sequence.
static public int IsProperSupersetOf(IntPtr l)
{
    try
    {
        IntHashSet target = (IntHashSet)checkSelf(l);
        System.Collections.Generic.IEnumerable <System.Int32> other;
        checkType(l, 2, out other);
        var isProperSuperset = target.IsProperSupersetOf(other);
        pushValue(l, true);
        pushValue(l, isProperSuperset);
        return 2;
    }
    catch (Exception e)
    {
        return error(l, e);
    }
}
// Lua binding: removes a single integer element; pushes whether it was present.
static public int Remove(IntPtr l)
{
    try
    {
        IntHashSet target = (IntHashSet)checkSelf(l);
        System.Int32 element;
        checkType(l, 2, out element);
        var wasRemoved = target.Remove(element);
        pushValue(l, true);
        pushValue(l, wasRemoved);
        return 2;
    }
    catch (Exception e)
    {
        return error(l, e);
    }
}
// Lua binding: forwards GetObjectData(SerializationInfo, StreamingContext) to the target set.
static public int GetObjectData(IntPtr l)
{
    try
    {
        IntHashSet target = (IntHashSet)checkSelf(l);
        System.Runtime.Serialization.SerializationInfo info;
        checkType(l, 2, out info);
        System.Runtime.Serialization.StreamingContext context;
        checkValueType(l, 3, out context);
        target.GetObjectData(info, context);
        pushValue(l, true);
        return 1;
    }
    catch (Exception e)
    {
        return error(l, e);
    }
}
// Returns a factory that, given a relationship record, produces a check over the
// property keys configured for the relationship's type; relationship types with no
// configured keys get a no-op check.
public virtual System.Func <RelationshipRecord, Check <RelationshipRecord, Org.Neo4j.Consistency.report.ConsistencyReport_RelationshipConsistencyReport> > ForRelationships(ConsistencyReporter reporter)
{
    return(relationship =>
    {
        int[] propertyKeys = _relationships.get(relationship.Type);
        if (propertyKeys != null)
        {
            // Copy the configured keys into a set sized to fit exactly.
            MutableIntSet keys = new IntHashSet(propertyKeys.Length);
            foreach (int key in propertyKeys)
            {
                keys.add(key);
            }
            return new RealCheck <Org.Neo4j.Kernel.impl.store.record.RelationshipRecord, Check <Org.Neo4j.Kernel.impl.store.record.RelationshipRecord, Org.Neo4j.Consistency.report.ConsistencyReport_RelationshipConsistencyReport> >(relationship, typeof(ConsistencyReport.RelationshipConsistencyReport), reporter, RecordType.Relationship, keys);
        }
        return NoCheck();
    });
}
/// <summary>
/// Called to return the drop effect when the specified data object is dragged over the
/// specified resource.
/// </summary>
/// <param name="targetResource">The resource over which the drag happens.</param>
/// <param name="data">The <see cref="IDataObject"/> containing the dragged data.</param>
/// <param name="allowedEffect">The drag-and-drop operations which are allowed by the
/// originator (or source) of the drag event.</param>
/// <param name="keyState">The current state of the SHIFT, CTRL, and ALT keys,
/// as well as the state of the mouse buttons.</param>
/// <returns>The target drop effect.</returns>
public DragDropEffects DragOver(IResource targetResource, IDataObject data, DragDropEffects allowedEffect, int keyState)
{
    if (data.GetDataPresent(typeof(IResourceList))) // Dragging resources over
    {
        // The resources we're dragging
        IResourceList dragResources = (IResourceList)data.GetData(typeof(IResourceList));
        // Restrict the allowed target res-types
        if (!((targetResource.Type == "Task") || (targetResource == Core.ResourceTreeManager.GetRootForType("Task"))))
        {
            return(DragDropEffects.None);
        }
        // Collect all the direct and indirect parents of the droptarget; then we'll check to avoid dropping parent on its children
        IntHashSet ancestors = new IntHashSet();
        IResource parent = targetResource;
        while (parent != null)
        {
            ancestors.Add(parent.Id);
            parent = parent.GetLinkProp(TasksPlugin._linkSuperTask);
        }
        // Measure some metrics on the dragged resources, don't allow mixing tasks/resources and prohibit the internal resources
        bool bAllTasks = true;
        bool bNoTasks = true;
        bool bNoInternal = true;
        foreach (IResource res in dragResources)
        {
            bAllTasks = bAllTasks && (res.Type == "Task");
            bNoTasks = bNoTasks && (res.Type != "Task");
            bNoInternal = bNoInternal && (!Core.ResourceStore.ResourceTypes[res.Type].HasFlag(ResourceTypeFlags.Internal));
            if (ancestors.Contains(res.Id))
            {
                return(DragDropEffects.None); // Dropping parent on a child
            }
        }
        // Mixed task/non-task selections and internal resources cannot be dropped.
        if (((!bAllTasks) && (!bNoTasks)) || (!bNoInternal))
        {
            return(DragDropEffects.None);
        }
        // Link attachments, move the tasks
        return(bAllTasks ? DragDropEffects.Move : DragDropEffects.Link);
    }
    return(DragDropEffects.None);
}
/// <summary>
/// Initializes the set of keys that the editor handles itself instead of feeding
/// them to the action processor.
/// </summary>
static JetTextBox()
{
    // Keys that should not be fed to the action processor.
    // No lock is needed: the CLR guarantees a static constructor executes exactly once,
    // before any other thread can access the type's static members, so the original
    // lock(_hashEditorKeys) was redundant.
    _hashEditorKeys = new IntHashSet();
    _hashEditorKeys.Add((int)Keys.Delete);
    _hashEditorKeys.Add((int)Keys.Back);
    _hashEditorKeys.Add((int)Keys.Space);
    _hashEditorKeys.Add((int)Keys.Left);
    _hashEditorKeys.Add((int)Keys.Right);
    _hashEditorKeys.Add((int)Keys.Up);
    _hashEditorKeys.Add((int)Keys.Down);
    _hashEditorKeys.Add((int)(Keys.Back | Keys.Control));
    _hashEditorKeys.Add((int)(Keys.Delete | Keys.Control));
}
// Creates tokens for any names still unresolved, under the instance lock so that
// concurrent callers cannot create the same tokens twice. Resolved IDs are written
// into 'ids'; newly created tokens are published to the registry.
private void CreateMissingTokens(string[] names, int[] ids)
{
    // NOTE(review): lock(this) is visible to callers; a private gate object would be safer.
    lock (this)
    {
        // We redo the resolving under the lock, to make sure that these ids are really missing, and won't be
        // created concurrently with us.
        MutableIntSet unresolvedIndexes = new IntHashSet();
        ResolveIds(names, ids, i => !unresolvedIndexes.add(i));
        if (!unresolvedIndexes.Empty)
        {
            // We still have unresolved ids to create.
            ObjectIntHashMap <string> createdTokens = CreateUnresolvedTokens(unresolvedIndexes, names, ids);
            IList <NamedToken> createdTokensList = new List <NamedToken>(createdTokens.size());
            createdTokens.forEachKeyValue((name, index) => createdTokensList.Add(new NamedToken(name, ids[index])));
            TokenRegistry.putAll(createdTokensList);
        }
    }
}