/// <summary>
/// Exercises the basic IDictionary-style contract of LinkedHashMap:
/// Count/Keys/Values sizes, ContainsKey, Remove (hit and miss), and
/// indexer get/set including re-adding a previously removed key.
/// </summary>
public void SimpleOperationsShouldBehaveAsInDictionary()
{
    var map = new LinkedHashMap<int, string>();
    map.Add(1, "One");
    map.Add(3, "Three");
    map.Add(2, "Two");

    // All three views agree on the size.
    Assert.AreEqual(3, map.Count);
    Assert.AreEqual(3, map.Keys.Count);
    Assert.AreEqual(3, map.Values.Count);

    // Present keys are found; an absent key is not.
    foreach (var presentKey in new[] { 1, 2, 3 })
    {
        Assert.True(map.ContainsKey(presentKey));
    }
    Assert.True(!map.ContainsKey(4));

    // Removing a missing key is a no-op; removing existing keys shrinks the map.
    Assert.False(map.Remove(4));
    Assert.True(map.Remove(2));
    Assert.AreEqual(2, map.Count);
    Assert.True(map.Remove(1));
    Assert.AreEqual(1, map.Count);
    Assert.AreEqual("Three", map[3]);

    // Setting via the indexer re-adds a removed key.
    map[1] = "OneAgain";
    Assert.AreEqual(2, map.Count);
    Assert.AreEqual("OneAgain", map[1]);
    Assert.AreEqual("Three", map[3]);
}
// Exercises MapEventType.Equals/EqualsCompareType against the fixture _eventType:
// missing properties, extra properties, a differing type name, and the
// boxed-versus-primitive property-type compatibility rules.
public void TestEquals()
{
    EventTypeMetadata metadata = EventTypeMetadata.CreateNonPonoApplicationType(
        ApplicationType.MAP, "", true, true, true, false, false);

    // Property map still missing MyString/myNullableString/myNullType -> not equal.
    IDictionary<String, Object> mapTwo = new LinkedHashMap<String, Object>();
    mapTwo["MyInt"] = typeof(int);
    mapTwo["mySupportBean"] = typeof(SupportBean);
    mapTwo["myNullableSupportBean"] = typeof(SupportBean);
    mapTwo["myComplexBean"] = typeof(SupportBeanComplexProps);
    Assert.IsFalse(
        (new MapEventType(metadata, "", 1, _eventAdapterService, mapTwo, null, null, null)).Equals(_eventType));

    mapTwo["MyString"] = typeof(string);
    mapTwo["myNullableString"] = typeof(string);
    mapTwo["myNullType"] = null;
    // compare, should equal
    Assert.IsTrue(
        new MapEventType(metadata, "", 1, _eventAdapterService, mapTwo, null, null, null).EqualsCompareType(
            _eventType));
    // Same properties but a different type name -> not equal.
    Assert.IsFalse(
        (new MapEventType(metadata, "google", 1, _eventAdapterService, mapTwo, null, null, null)).
        EqualsCompareType(_eventType));

    // An extra property breaks equality; removing it restores equality.
    mapTwo["xx"] = typeof(int);
    Assert.IsFalse(
        _eventType.EqualsCompareType(new MapEventType(metadata, "", 1, _eventAdapterService, mapTwo, null, null, null)));
    mapTwo.Remove("xx");
    Assert.IsTrue(
        _eventType.EqualsCompareType(new MapEventType(metadata, "", 1, _eventAdapterService, mapTwo, null, null, null)));

    // Boxed int? is still considered compatible with primitive int.
    mapTwo["MyInt"] = typeof(int?);
    Assert.IsTrue(
        _eventType.EqualsCompareType(new MapEventType(metadata, "", 1, _eventAdapterService, mapTwo, null, null, null)));

    // A missing property breaks equality; re-adding it restores equality.
    mapTwo.Remove("MyInt");
    Assert.IsFalse(
        _eventType.EqualsCompareType(new MapEventType(metadata, "", 1, _eventAdapterService, mapTwo, null, null, null)));
    mapTwo["MyInt"] = typeof(int);
    Assert.IsTrue(
        _eventType.EqualsCompareType(new MapEventType(metadata, "", 1, _eventAdapterService, mapTwo, null, null, null)));

    // Test boxed and primitive compatible
    IDictionary<String, Object> mapOne = new LinkedHashMap<String, Object>();
    mapOne["MyInt"] = typeof(int);
    mapTwo = new LinkedHashMap<String, Object>();
    mapTwo["MyInt"] = typeof(int);
    Assert.IsTrue(
        new MapEventType(metadata, "T1", 1, _eventAdapterService, mapOne, null, null, null).EqualsCompareType(
            new MapEventType(metadata, "T1", 1, _eventAdapterService, mapTwo, null, null, null)));
}
// Removes the registration of <extension> under <key>.
// Mutates a private copy of the internal structure under the write lock and
// publishes the copy atomically at the end (copy-on-write).
public override void Unregister(V extension, Type key)
{
    Object writeLock = GetWriteLock();
    lock (writeLock)
    {
        // Let the base class drop its own bookkeeping first.
        base.Unregister(extension, key);
        // Work on a copy; readers keep seeing the old structure until we publish.
        ClassEntry<V> classEntry = CopyStructure();
        LinkedHashMap<StrongKey<V>, List<DefEntry<V>>> definitionReverseMap = classEntry.definitionReverseMap;
        // Remove returns the previous value (Java-map style), or null when absent.
        List<DefEntry<V>> weakEntriesOfStrongType = definitionReverseMap.Remove(new StrongKey<V>(extension, key));
        if (weakEntriesOfStrongType == null)
        {
            // Nothing was registered for this (extension, key) pair.
            return;
        }
        LinkedHashMap<Type, Object> typeToDefEntryMap = classEntry.typeToDefEntryMap;
        // Walk the removed entries last-to-first (compact reverse-count loop).
        for (int a = weakEntriesOfStrongType.Count; a-- > 0;)
        {
            DefEntry<V> defEntry = weakEntriesOfStrongType[a];
            Type registeredType = defEntry.type;
            Object value = typeToDefEntryMap.Get(registeredType);
            InterfaceFastList<DefEntry<V>> list = (InterfaceFastList<DefEntry<V>>)value;
            list.Remove(defEntry);
            if (list.Count == 0)
            {
                // Last entry for this type is gone — drop the whole bucket.
                typeToDefEntryMap.Remove(registeredType);
            }
            // Notify subclasses/listeners that the mapping for this type changed.
            TypeToDefEntryMapChanged(classEntry, registeredType);
        }
        // Publish the updated copy.
        this.classEntry = classEntry;
    }
}
// Builds a word-frequency table for the paragraph, removes one word, and
// verifies that the removed word then reports a frequency of zero.
// NOTE(review): "Whene" in the test name looks like a typo for "When".
public void GivenParagraph_WheneAWordRemoved_ShouldReturnWordFrequency()
{
    string Paragraph = "Paranoids are not " +
                       "paranoid because they are paranoid but " +
                       "because they keep putting themselves " +
                       "deliberately into paranoid avoidable situations";
    // Case-fold and split on spaces so repeated words share a single key.
    string[] words = Paragraph.ToLower().Split(" ");
    foreach (string word in words)
    {
        // Get presumably returns default(int) == 0 for a missing key — TODO confirm.
        int value = LinkedHashMap.Get(word);
        if (value == default)
        {
            value = 1; // first occurrence
        }
        else
        {
            value += 1; // subsequent occurrence
        }
        // Add presumably upserts here (unlike Dictionary.Add, which throws on a
        // duplicate key) — verify against the LinkedHashMap implementation.
        LinkedHashMap.Add(word, value);
    }
    LinkedHashMap.Remove("avoidable");
    // After removal, the lookup should fall back to default(int) == 0.
    int frequency = LinkedHashMap.Get("avoidable");
    Console.WriteLine(LinkedHashMap);
    Assert.AreEqual(0, frequency);
}
/// <summary>
/// Handles an event leaving the window: decrements its reference count in
/// <c>RefSet</c> and removes the entry entirely once the count reaches zero.
/// Also invalidates the cached array.
/// </summary>
/// <param name="eventsPerStream">events per stream; the event at StreamId is processed</param>
/// <param name="exprEvaluatorContext">evaluation context (unused here)</param>
public void ApplyLeave(EventBean[] eventsPerStream, ExprEvaluatorContext exprEvaluatorContext)
{
    var leaving = eventsPerStream[StreamId];
    if (leaving == null)
    {
        return;
    }

    // Invalidate the cached array representation.
    _array = null;

    var refCount = RefSet.Get(leaving);
    if (refCount == null)
    {
        // Event was never tracked — nothing to do.
        return;
    }

    if (refCount == 1)
    {
        // Last reference — drop the entry.
        RefSet.Remove(leaving);
    }
    else
    {
        RefSet.Put(leaving, refCount - 1);
    }
}
/// <summary>
/// Handles an event leaving the window: decrements its reference count in
/// <c>refSet</c> and removes the entry entirely once the count reaches zero.
/// Also invalidates the cached array.
/// </summary>
/// <param name="eventsPerStream">events per stream; the event at streamId is processed</param>
public void ApplyLeave(EventBean[] eventsPerStream)
{
    EventBean leaving = eventsPerStream[streamId];
    if (leaving == null)
    {
        return;
    }

    // Invalidate the cached array representation.
    _array = null;

    int refCount;
    if (!refSet.TryGetValue(leaving, out refCount))
    {
        // Event was never tracked — nothing to do.
        return;
    }

    if (refCount == 1)
    {
        // Last reference — drop the entry.
        refSet.Remove(leaving);
    }
    else
    {
        refSet.Put(leaving, refCount - 1);
    }
}
/// <summary>
/// Removing an existing key must return true and shrink the map;
/// removing an unknown key must return false and leave the count unchanged.
/// </summary>
public void Remove()
{
    IDictionary<string, Player> lhm = new LinkedHashMap<string, Player>();
    Fill(lhm);

    // Existing key: removal succeeds, five entries remain.
    Assert.IsTrue(lhm.Remove("23411"));
    Assert.AreEqual(5, lhm.Count);

    // Unknown key: removal is a no-op.
    Assert.IsFalse(lhm.Remove("65432"));
    Assert.AreEqual(5, lhm.Count);
}
/// <summary>
/// Remove an attribute by key. Safe to call when no attributes exist yet.
/// </summary>
/// <param name="key">attribute key to remove; must be non-empty</param>
public void Remove(string key)
{
    // Reject null/empty keys up front.
    Validate.NotEmpty(key);
    // No backing map yet means there is nothing to remove.
    if (attributes == null)
    {
        return;
    }
    // Keys are looked up lowercased — presumably they are also stored lowercased
    // on insert; NOTE(review): ToLower() is culture-sensitive (e.g. Turkish 'I').
    // Confirm whether ToLowerInvariant() is intended here and at the insertion site.
    attributes.Remove(key.ToLower());
}
/// <summary>
/// Removing an entry while enumerating the map must invalidate the enumerator
/// and raise <see cref="InvalidOperationException"/>, matching BCL dictionary semantics.
/// </summary>
public void GetEnumeratorModifyExceptionFromRemove()
{
    IDictionary<string, Player> map = new LinkedHashMap<string, Player>();
    map["123"] = new Player("123", "yyyyyyy");

    Assert.Throws<InvalidOperationException>(
        () =>
        {
            foreach (var entry in map)
            {
                map.Remove(entry.Key); // mutation during enumeration
            }
        });
}
/// <summary>
/// Inspects the pattern subtree under <paramref name="relativeNode"/> and classifies
/// every tagged filter into plain tags versus array tags (tags occurring under a
/// repeat/match-until operator, which therefore represent arrays of events).
/// </summary>
/// <param name="relativeNode">root of the pattern subtree to analyze</param>
/// <returns>the tagged and array event types found in the subtree</returns>
private static MatchEventSpec AnalyzeMatchEvent(EvalFactoryNode relativeNode)
{
    var taggedEventTypes = new LinkedHashMap<string, Pair<EventType, string>>();
    var arrayEventTypes = new LinkedHashMap<string, Pair<EventType, string>>();

    // Collect every filter node used anywhere in the pattern.
    var analysisResult = EvalNodeUtil.RecursiveAnalyzeChildNodes(relativeNode);
    foreach (var filterNode in analysisResult.FilterNodes)
    {
        var tag = filterNode.EventAsName;
        if (tag == null)
        {
            continue;
        }
        taggedEventTypes.Put(
            tag,
            new Pair<EventType, string>(
                filterNode.FilterSpec.FilterForEventType,
                filterNode.FilterSpec.FilterForEventTypeName));
    }

    // Tags that appear underneath a repeat operator produce arrays of events.
    var arrayTags = new HashSet<string>();
    foreach (var matchUntilNode in analysisResult.RepeatNodes)
    {
        var repeatAnalysis = EvalNodeUtil.RecursiveAnalyzeChildNodes(matchUntilNode.ChildNodes[0]);
        foreach (var filterNode in repeatAnalysis.FilterNodes)
        {
            var tag = filterNode.EventAsName;
            if (tag != null)
            {
                arrayTags.Add(tag);
            }
        }
    }

    // Re-home every tag that turned out to be an array tag.
    foreach (var arrayTag in arrayTags)
    {
        var existing = taggedEventTypes.Get(arrayTag);
        if (existing != null)
        {
            arrayEventTypes.Put(arrayTag, existing);
            taggedEventTypes.Remove(arrayTag);
        }
    }

    return new MatchEventSpec(taggedEventTypes, arrayEventTypes);
}
/// <summary>
/// Removes the named feature from the backing feature map; a no-op if absent.
/// </summary>
/// <param name="name">name of the feature to remove</param>
public virtual void Remove(string name) => _featureMap.Remove(name);
// Compiles a pattern evaluation tree bottom-up (children first), resolving filter
// event types, tag bookkeeping (plain versus array tags), and validating observer,
// guard, every-distinct, match-until and followed-by parameters. New tags discovered
// for this node are merged into <tags> only after the node itself is processed.
private static void RecursiveCompile(
    EvalFactoryNode evalNode,
    StatementContext context,
    ExprEvaluatorContext evaluatorContext,
    ICollection<string> eventTypeReferences,
    bool isInsertInto,
    MatchEventSpec tags,
    Deque<int> subexpressionIdStack,
    Stack<EvalFactoryNode> parentNodeStack,
    ICollection<string> allTagNamesOrdered)
{
    var counter = 0;
    parentNodeStack.Push(evalNode);
    // Depth-first: compile children before this node, tracking the child index path.
    foreach (var child in evalNode.ChildNodes)
    {
        subexpressionIdStack.AddLast(counter++);
        RecursiveCompile(
            child, context, evaluatorContext, eventTypeReferences, isInsertInto, tags,
            subexpressionIdStack, parentNodeStack, allTagNamesOrdered);
        subexpressionIdStack.RemoveLast();
    }
    parentNodeStack.Pop();
    // Tags discovered at this node; merged into <tags> at the end of the method.
    LinkedHashMap<string, Pair<EventType, string>> newTaggedEventTypes = null;
    LinkedHashMap<string, Pair<EventType, string>> newArrayEventTypes = null;
    if (evalNode is EvalFilterFactoryNode)
    {
        var filterNode = (EvalFilterFactoryNode)evalNode;
        var eventName = filterNode.RawFilterSpec.EventTypeName;
        if (context.TableService.GetTableMetadata(eventName) != null)
        {
            throw new ExprValidationException("Tables cannot be used in pattern filter atoms");
        }
        var resolvedEventType = FilterStreamSpecRaw.ResolveType(
            context.EngineURI, eventName, context.EventAdapterService, context.PlugInTypeResolutionURIs);
        var finalEventType = resolvedEventType;
        var optionalTag = filterNode.EventAsName;
        var isPropertyEvaluation = false;
        var isParentMatchUntil = IsParentMatchUntil(evalNode, parentNodeStack);
        // obtain property event type, if final event type is properties
        if (filterNode.RawFilterSpec.OptionalPropertyEvalSpec != null)
        {
            var optionalPropertyEvaluator = PropertyEvaluatorFactory.MakeEvaluator(
                context.Container,
                filterNode.RawFilterSpec.OptionalPropertyEvalSpec,
                resolvedEventType,
                filterNode.EventAsName,
                context.EventAdapterService,
                context.EngineImportService,
                context.SchedulingService,
                context.VariableService,
                context.ScriptingService,
                context.TableService,
                context.EngineURI,
                context.StatementId,
                context.StatementName,
                context.Annotations,
                subexpressionIdStack,
                context.ConfigSnapshot,
                context.NamedWindowMgmtService,
                context.StatementExtensionServicesContext);
            finalEventType = optionalPropertyEvaluator.FragmentEventType;
            isPropertyEvaluation = true;
        }
        if (finalEventType is EventTypeSPI)
        {
            eventTypeReferences.Add(((EventTypeSPI)finalEventType).Metadata.PrimaryName);
        }
        // If a tag was supplied for the type, the tags must stay with this type, i.e. a=BeanA -> b=BeanA -> a=BeanB is a no
        if (optionalTag != null)
        {
            var pair = tags.TaggedEventTypes.Get(optionalTag);
            EventType existingType = null;
            if (pair != null)
            {
                existingType = pair.First;
            }
            if (existingType == null)
            {
                // A tag may not be both a plain tag and an array (repeat) tag.
                pair = tags.ArrayEventTypes.Get(optionalTag);
                if (pair != null)
                {
                    throw new ExprValidationException(
                        "Tag '" + optionalTag + "' for event '" + eventName +
                        "' used in the repeat-until operator cannot also appear in other filter expressions");
                }
            }
            if ((existingType != null) && (existingType != finalEventType))
            {
                throw new ExprValidationException(
                    "Tag '" + optionalTag + "' for event '" + eventName +
                    "' has already been declared for events of type " + existingType.UnderlyingType.FullName);
            }
            pair = new Pair<EventType, string>(finalEventType, eventName);
            // add tagged type
            if (isPropertyEvaluation || isParentMatchUntil)
            {
                newArrayEventTypes = new LinkedHashMap<string, Pair<EventType, string>>();
                newArrayEventTypes.Put(optionalTag, pair);
            }
            else
            {
                newTaggedEventTypes = new LinkedHashMap<string, Pair<EventType, string>>();
                newTaggedEventTypes.Put(optionalTag, pair);
            }
        }
        // For this filter, filter types are all known tags at this time,
        // and additionally stream 0 (self) is our event type.
        // Stream type service allows resolution by property name event if that name appears in other tags.
        // by defaulting to stream zero.
        // Stream zero is always the current event type, all others follow the order of the map (stream 1 to N).
        var selfStreamName = optionalTag;
        if (selfStreamName == null)
        {
            // Untagged filter: synthesize a unique stream name for stream zero.
            selfStreamName = "s_" + UuidGenerator.Generate();
        }
        var filterTypes = new LinkedHashMap<string, Pair<EventType, string>>();
        var typePair = new Pair<EventType, string>(finalEventType, eventName);
        filterTypes.Put(selfStreamName, typePair);
        filterTypes.PutAll(tags.TaggedEventTypes);
        // for the filter, specify all tags used
        var filterTaggedEventTypes = new LinkedHashMap<string, Pair<EventType, string>>(tags.TaggedEventTypes);
        filterTaggedEventTypes.Remove(optionalTag);
        // handle array tags (match-until clause)
        LinkedHashMap<string, Pair<EventType, string>> arrayCompositeEventTypes = null;
        if (tags.ArrayEventTypes != null && !tags.ArrayEventTypes.IsEmpty())
        {
            arrayCompositeEventTypes = new LinkedHashMap<string, Pair<EventType, string>>();
            var patternSubexEventType = GetPatternSubexEventType(
                context.StatementId, "pattern", subexpressionIdStack);
            foreach (var entry in tags.ArrayEventTypes)
            {
                // Wrap each array tag in a semi-anonymous map type so its events
                // resolve as an array-valued stream within this filter.
                var specificArrayType = new LinkedHashMap<string, Pair<EventType, string>>();
                specificArrayType.Put(entry.Key, entry.Value);
                var arrayTagCompositeEventType = context.EventAdapterService.CreateSemiAnonymousMapType(
                    patternSubexEventType,
                    Collections.GetEmptyMap<string, Pair<EventType, string>>(),
                    specificArrayType,
                    isInsertInto);
                context.StatementSemiAnonymousTypeRegistry.Register(arrayTagCompositeEventType);
                var tag = entry.Key;
                if (!filterTypes.ContainsKey(tag))
                {
                    var pair = new Pair<EventType, string>(arrayTagCompositeEventType, tag);
                    filterTypes.Put(tag, pair);
                    arrayCompositeEventTypes.Put(tag, pair);
                }
            }
        }
        StreamTypeService streamTypeService = new StreamTypeServiceImpl(
            filterTypes, context.EngineURI, true, false);
        var exprNodes = filterNode.RawFilterSpec.FilterExpressions;
        var spec = FilterSpecCompiler.MakeFilterSpec(
            resolvedEventType, eventName, exprNodes,
            filterNode.RawFilterSpec.OptionalPropertyEvalSpec,
            filterTaggedEventTypes, arrayCompositeEventTypes,
            streamTypeService, null, context, subexpressionIdStack);
        filterNode.FilterSpec = spec;
    }
    else if (evalNode is EvalObserverFactoryNode)
    {
        // Observer: resolve the factory, validate its parameters against all known tags.
        var observerNode = (EvalObserverFactoryNode)evalNode;
        try
        {
            var observerFactory = context.PatternResolutionService.Create(observerNode.PatternObserverSpec);
            var streamTypeService = GetStreamTypeService(
                context.EngineURI, context.StatementId, context.EventAdapterService,
                tags.TaggedEventTypes, tags.ArrayEventTypes, subexpressionIdStack,
                "observer", context);
            var validationContext = new ExprValidationContext(
                context.Container, streamTypeService, context.EngineImportService,
                context.StatementExtensionServicesContext, null, context.SchedulingService,
                context.VariableService, context.TableService, evaluatorContext,
                context.EventAdapterService, context.StatementName, context.StatementId,
                context.Annotations, context.ContextDescriptor, context.ScriptingService,
                false, false, false, false, null, false);
            var validated = ValidateExpressions(
                ExprNodeOrigin.PATTERNOBSERVER, observerNode.PatternObserverSpec.ObjectParameters,
                validationContext);
            MatchedEventConvertor convertor = new MatchedEventConvertorImpl(
                tags.TaggedEventTypes, tags.ArrayEventTypes, allTagNamesOrdered,
                context.EventAdapterService);
            observerNode.ObserverFactory = observerFactory;
            observerFactory.SetObserverParameters(validated, convertor, validationContext);
        }
        catch (ObserverParameterException e)
        {
            throw new ExprValidationException(
                "Invalid parameter for pattern observer '" + observerNode.ToPrecedenceFreeEPL() + "': " + e.Message, e);
        }
        catch (PatternObjectException e)
        {
            throw new ExprValidationException(
                "Failed to resolve pattern observer '" + observerNode.ToPrecedenceFreeEPL() + "': " + e.Message, e);
        }
    }
    else if (evalNode is EvalGuardFactoryNode)
    {
        // Guard: resolve the factory, validate its parameters against all known tags.
        var guardNode = (EvalGuardFactoryNode)evalNode;
        try
        {
            var guardFactory = context.PatternResolutionService.Create(guardNode.PatternGuardSpec);
            var streamTypeService = GetStreamTypeService(
                context.EngineURI, context.StatementId, context.EventAdapterService,
                tags.TaggedEventTypes, tags.ArrayEventTypes, subexpressionIdStack,
                "guard", context);
            var validationContext = new ExprValidationContext(
                context.Container, streamTypeService, context.EngineImportService,
                context.StatementExtensionServicesContext, null, context.SchedulingService,
                context.VariableService, context.TableService, evaluatorContext,
                context.EventAdapterService, context.StatementName, context.StatementId,
                context.Annotations, context.ContextDescriptor, context.ScriptingService,
                false, false, false, false, null, false);
            var validated = ValidateExpressions(
                ExprNodeOrigin.PATTERNGUARD, guardNode.PatternGuardSpec.ObjectParameters,
                validationContext);
            MatchedEventConvertor convertor = new MatchedEventConvertorImpl(
                tags.TaggedEventTypes, tags.ArrayEventTypes, allTagNamesOrdered,
                context.EventAdapterService);
            guardNode.GuardFactory = guardFactory;
            guardFactory.SetGuardParameters(validated, convertor);
        }
        catch (GuardParameterException e)
        {
            throw new ExprValidationException(
                "Invalid parameter for pattern guard '" + guardNode.ToPrecedenceFreeEPL() + "': " + e.Message, e);
        }
        catch (PatternObjectException e)
        {
            throw new ExprValidationException(
                "Failed to resolve pattern guard '" + guardNode.ToPrecedenceFreeEPL() + "': " + e.Message, e);
        }
    }
    else if (evalNode is EvalEveryDistinctFactoryNode)
    {
        // Every-distinct: validate distinct-value expressions; the last expression
        // may instead be a time period / seconds constant defining expiry.
        var distinctNode = (EvalEveryDistinctFactoryNode)evalNode;
        var matchEventFromChildNodes = AnalyzeMatchEvent(distinctNode);
        var streamTypeService = GetStreamTypeService(
            context.EngineURI, context.StatementId, context.EventAdapterService,
            matchEventFromChildNodes.TaggedEventTypes, matchEventFromChildNodes.ArrayEventTypes,
            subexpressionIdStack, "every-distinct", context);
        var validationContext = new ExprValidationContext(
            context.Container, streamTypeService, context.EngineImportService,
            context.StatementExtensionServicesContext, null, context.SchedulingService,
            context.VariableService, context.TableService, evaluatorContext,
            context.EventAdapterService, context.StatementName, context.StatementId,
            context.Annotations, context.ContextDescriptor, context.ScriptingService,
            false, false, false, false, null, false);
        IList<ExprNode> validated;
        try
        {
            validated = ValidateExpressions(
                ExprNodeOrigin.PATTERNEVERYDISTINCT, distinctNode.Expressions, validationContext);
        }
        catch (ExprValidationPropertyException ex)
        {
            throw new ExprValidationPropertyException(
                ex.Message +
                ", every-distinct requires that all properties resolve from sub-expressions to the every-distinct",
                ex.InnerException);
        }
        MatchedEventConvertor convertor = new MatchedEventConvertorImpl(
            matchEventFromChildNodes.TaggedEventTypes, matchEventFromChildNodes.ArrayEventTypes,
            allTagNamesOrdered, context.EventAdapterService);
        distinctNode.Convertor = convertor;
        // Determine whether some expressions are constants or time period
        IList<ExprNode> distinctExpressions = new List<ExprNode>();
        ExprTimePeriodEvalDeltaConst timeDeltaComputation = null;
        ExprNode expiryTimeExp = null;
        var count = -1;
        var last = validated.Count - 1;
        foreach (var expr in validated)
        {
            count++;
            if (count == last && expr is ExprTimePeriod)
            {
                // Last expression is a time period: use it as the expiry interval.
                expiryTimeExp = expr;
                var timePeriodExpr = (ExprTimePeriod)expiryTimeExp;
                timeDeltaComputation = timePeriodExpr.ConstEvaluator(
                    new ExprEvaluatorContextStatement(context, false));
            }
            else if (expr.IsConstantResult)
            {
                if (count == last)
                {
                    // Last expression is a constant: interpret it as seconds-to-expire.
                    var evaluateParams = new EvaluateParams(null, true, evaluatorContext);
                    var value = expr.ExprEvaluator.Evaluate(evaluateParams);
                    if (!(value.IsNumber()))
                    {
                        throw new ExprValidationException(
                            "Invalid parameter for every-distinct, expected number of seconds constant (constant not considered for distinct)");
                    }
                    var secondsExpire = expr.ExprEvaluator.Evaluate(evaluateParams);
                    long? timeExpire;
                    if (secondsExpire == null)
                    {
                        timeExpire = null;
                    }
                    else
                    {
                        timeExpire = context.TimeAbacus.DeltaForSecondsNumber(secondsExpire);
                    }
                    if (timeExpire != null && timeExpire > 0)
                    {
                        timeDeltaComputation = new ExprTimePeriodEvalDeltaConstGivenDelta(timeExpire.Value);
                        expiryTimeExp = expr;
                    }
                    else
                    {
                        // Zero/negative/null expiry: warn and ignore the expression.
                        Log.Warn("Invalid seconds-expire " + timeExpire + " for " +
                                 ExprNodeUtility.ToExpressionStringMinPrecedenceSafe(expr));
                    }
                }
                else
                {
                    // Constants other than the last are useless for distinctness.
                    Log.Warn(
                        "Every-distinct node utilizes an expression returning a constant value, please check expression '{0}', not adding expression to distinct-value expression list",
                        expr.ToExpressionStringMinPrecedenceSafe());
                }
            }
            else
            {
                distinctExpressions.Add(expr);
            }
        }
        if (distinctExpressions.IsEmpty())
        {
            throw new ExprValidationException(
                "Every-distinct node requires one or more distinct-value expressions that each return non-constant result values");
        }
        distinctNode.SetDistinctExpressions(distinctExpressions, timeDeltaComputation, expiryTimeExp);
    }
    else if (evalNode is EvalMatchUntilFactoryNode)
    {
        var matchUntilNode = (EvalMatchUntilFactoryNode)evalNode;
        // compile bounds expressions, if any
        var untilMatchEventSpec = new MatchEventSpec(tags.TaggedEventTypes, tags.ArrayEventTypes);
        var streamTypeService = GetStreamTypeService(
            context.EngineURI, context.StatementId, context.EventAdapterService,
            untilMatchEventSpec.TaggedEventTypes, untilMatchEventSpec.ArrayEventTypes,
            subexpressionIdStack, "until", context);
        var validationContext = new ExprValidationContext(
            context.Container, streamTypeService, context.EngineImportService,
            context.StatementExtensionServicesContext, null, context.SchedulingService,
            context.VariableService, context.TableService, evaluatorContext,
            context.EventAdapterService, context.StatementName, context.StatementId,
            context.Annotations, context.ContextDescriptor, context.ScriptingService,
            false, false, false, false, null, false);
        var lower = ValidateBounds(matchUntilNode.LowerBounds, validationContext);
        matchUntilNode.LowerBounds = lower;
        var upper = ValidateBounds(matchUntilNode.UpperBounds, validationContext);
        matchUntilNode.UpperBounds = upper;
        var single = ValidateBounds(matchUntilNode.SingleBound, validationContext);
        matchUntilNode.SingleBound = single;
        var convertor = new MatchedEventConvertorImpl(
            untilMatchEventSpec.TaggedEventTypes, untilMatchEventSpec.ArrayEventTypes,
            allTagNamesOrdered, context.EventAdapterService);
        matchUntilNode.Convertor = convertor;
        // compile new tag lists
        ISet<string> arrayTags = null;
        var matchUntilAnalysisResult = EvalNodeUtil.RecursiveAnalyzeChildNodes(matchUntilNode.ChildNodes[0]);
        foreach (var filterNode in matchUntilAnalysisResult.FilterNodes)
        {
            var optionalTag = filterNode.EventAsName;
            if (optionalTag != null)
            {
                if (arrayTags == null)
                {
                    arrayTags = new HashSet<string>();
                }
                arrayTags.Add(optionalTag);
            }
        }
        if (arrayTags != null)
        {
            // Tags under the repeated sub-expression become array tags.
            foreach (var arrayTag in arrayTags)
            {
                if (!tags.ArrayEventTypes.ContainsKey(arrayTag))
                {
                    tags.ArrayEventTypes.Put(arrayTag, tags.TaggedEventTypes.Get(arrayTag));
                    tags.TaggedEventTypes.Remove(arrayTag);
                }
            }
        }
        matchUntilNode.TagsArrayed = GetIndexesForTags(allTagNamesOrdered, arrayTags);
    }
    else if (evalNode is EvalFollowedByFactoryNode)
    {
        // Followed-by: validate optional max-instance expressions, which must be
        // plain numeric expressions.
        var followedByNode = (EvalFollowedByFactoryNode)evalNode;
        StreamTypeService streamTypeService = new StreamTypeServiceImpl(context.EngineURI, false);
        var validationContext = new ExprValidationContext(
            context.Container, streamTypeService, context.EngineImportService,
            context.StatementExtensionServicesContext, null, context.SchedulingService,
            context.VariableService, context.TableService, evaluatorContext,
            context.EventAdapterService, context.StatementName, context.StatementId,
            context.Annotations, context.ContextDescriptor, context.ScriptingService,
            false, false, false, false, null, false);
        if (followedByNode.OptionalMaxExpressions != null)
        {
            IList<ExprNode> validated = new List<ExprNode>();
            foreach (var maxExpr in followedByNode.OptionalMaxExpressions)
            {
                if (maxExpr == null)
                {
                    // Position without a max expression stays null.
                    validated.Add(null);
                }
                else
                {
                    var visitor = new ExprNodeSummaryVisitor();
                    maxExpr.Accept(visitor);
                    if (!visitor.IsPlain)
                    {
                        var errorMessage = "Invalid maximum expression in followed-by, " +
                                           visitor.GetMessage() + " are not allowed within the expression";
                        Log.Error(errorMessage);
                        throw new ExprValidationException(errorMessage);
                    }
                    var validatedExpr = ExprNodeUtility.GetValidatedSubtree(
                        ExprNodeOrigin.FOLLOWEDBYMAX, maxExpr, validationContext);
                    validated.Add(validatedExpr);
                    if ((validatedExpr.ExprEvaluator.ReturnType == null) ||
                        (!validatedExpr.ExprEvaluator.ReturnType.IsNumeric()))
                    {
                        var message = "Invalid maximum expression in followed-by, the expression must return an integer value";
                        throw new ExprValidationException(message);
                    }
                }
            }
            followedByNode.OptionalMaxExpressions = validated;
        }
    }
    // Merge tags discovered at this node into the shared tag collections.
    if (newTaggedEventTypes != null)
    {
        tags.TaggedEventTypes.PutAll(newTaggedEventTypes);
    }
    if (newArrayEventTypes != null)
    {
        tags.ArrayEventTypes.PutAll(newArrayEventTypes);
    }
}
/// <summary>
/// Verifies Remove on a pre-filled LinkedHashMap: true for an existing key
/// (count drops to 5), false for an unknown key (count unchanged).
/// </summary>
public void Remove()
{
    IDictionary<string, Player> lhm = new LinkedHashMap<string, Player>();
    Fill(lhm);

    // Existing key is removed.
    bool wasRemoved = lhm.Remove("23411");
    Assert.IsTrue(wasRemoved);
    Assert.AreEqual(5, lhm.Count);

    // Unknown key leaves the map untouched.
    wasRemoved = lhm.Remove("65432");
    Assert.IsFalse(wasRemoved);
    Assert.AreEqual(5, lhm.Count);
}
// Randomized round-trip test for a spatial operation: indexes random shapes
// (points, rectangles, shape pairs, some nulls), deletes a random subset, then
// for many random query shapes computes the expected matches by brute force and
// verifies the strategy's query returns them (allowing grid-snap false positives).
private void doTest(SpatialOperation operation)
{
    //first show that when there's no data, a query will result in no results
    {
        Query query = strategy.MakeQuery(new SpatialArgs(operation, randomRectangle()));
        SearchResults searchResults = executeQuery(query, 1);
        assertEquals(0, searchResults.numFound);
    }
    bool biasContains = (operation == SpatialOperation.Contains);
    //Main index loop:
    IDictionary<String, IShape> indexedShapes = new LinkedHashMap<String, IShape>();
    IDictionary<String, IShape> indexedShapesGS = new LinkedHashMap<String, IShape>();//grid snapped
    int numIndexedShapes = randomIntBetween(1, 6);
#pragma warning disable 219
    bool indexedAtLeastOneShapePair = false;
#pragma warning restore 219
    for (int i = 0; i < numIndexedShapes; i++)
    {
        String id = "" + i;
        IShape indexedShape;
        int R = Random.nextInt(12);
        if (R == 0)
        {//1 in 12
            indexedShape = null;
        }
        else if (R == 1)
        { //1 in 12
            indexedShape = randomPoint(); //just one point
        }
        else if (R <= 4)
        {//3 in 12
            //comprised of more than one shape
            indexedShape = randomShapePairRect(biasContains);
            indexedAtLeastOneShapePair = true;
        }
        else
        {
            indexedShape = randomRectangle();//just one rect
        }
        // Track both the raw shape and its grid-snapped version under the same id.
        indexedShapes.Put(id, indexedShape);
        indexedShapesGS.Put(id, gridSnap(indexedShape));
        adoc(id, indexedShape);
        if (Random.nextInt(10) == 0)
        {
            Commit();//intermediate commit, produces extra segments
        }
    }
    //delete some documents randomly
    IEnumerator<String> idIter = indexedShapes.Keys.ToList().GetEnumerator();
    while (idIter.MoveNext())
    {
        String id = idIter.Current;
        if (Random.nextInt(10) == 0)
        {
            DeleteDoc(id);
            //idIter.Remove();
            // Keep the truth maps in sync with the index.
            indexedShapes.Remove(id);
            indexedShapesGS.Remove(id);
        }
    }
    Commit();
    //Main query loop:
    int numQueryShapes = AtLeast(20);
    for (int i = 0; i < numQueryShapes; i++)
    {
        int scanLevel = randomInt(grid.MaxLevels);
        ((RecursivePrefixTreeStrategy)strategy).PrefixGridScanLevel = (scanLevel);
        IShape queryShape;
        switch (randomInt(10))
        {
            case 0:
                queryShape = randomPoint();
                break;
            // LUCENE-5549
            //TODO debug: -Dtests.method=testWithin -Dtests.multiplier=3 -Dtests.seed=5F5294CE2E075A3E:AAD2F0F79288CA64
            //            case 1:case 2:case 3:
            //                if (!indexedAtLeastOneShapePair) { // avoids ShapePair.relate(ShapePair), which isn't reliable
            //                    queryShape = randomShapePairRect(!biasContains);//invert biasContains for query side
            //                    break;
            //                }
            default:
                queryShape = randomRectangle();
                break;
        }
        IShape queryShapeGS = gridSnap(queryShape);
        bool opIsDisjoint = operation == SpatialOperation.IsDisjointTo;
        //Generate truth via brute force:
        // We ensure true-positive matches (if the predicate on the raw shapes match
        // then the search should find those same matches).
        // approximations, false-positive matches
        ISet<String> expectedIds = new /* LinkedHashSet<string>*/ HashSet<string>(); //true-positives
        ISet<String> secondaryIds = new /* LinkedHashSet<string>*/ HashSet<string>(); //false-positives (unless disjoint)
        foreach (var entry in indexedShapes)
        {
            String id = entry.Key;
            IShape indexedShapeCompare = entry.Value;
            if (indexedShapeCompare == null)
            {
                continue;
            }
            IShape queryShapeCompare = queryShape;
            if (operation.Evaluate(indexedShapeCompare, queryShapeCompare))
            {
                expectedIds.add(id);
                if (opIsDisjoint)
                {
                    //if no longer intersect after buffering them, for disjoint, remember this
                    indexedShapeCompare = indexedShapesGS[id];
                    queryShapeCompare = queryShapeGS;
                    if (!operation.Evaluate(indexedShapeCompare, queryShapeCompare))
                    {
                        secondaryIds.add(id);
                    }
                }
            }
            else if (!opIsDisjoint)
            {
                //buffer either the indexed or query shape (via gridSnap) and try again
                if (operation == SpatialOperation.Intersects)
                {
                    indexedShapeCompare = indexedShapesGS[id];
                    queryShapeCompare = queryShapeGS;
                    //TODO Unfortunately, grid-snapping both can result in intersections that otherwise
                    // wouldn't happen when the grids are adjacent. Not a big deal but our test is just a
                    // bit more lenient.
                }
                else if (operation == SpatialOperation.Contains)
                {
                    indexedShapeCompare = indexedShapesGS[id];
                }
                else if (operation == SpatialOperation.IsWithin)
                {
                    queryShapeCompare = queryShapeGS;
                }
                if (operation.Evaluate(indexedShapeCompare, queryShapeCompare))
                {
                    secondaryIds.add(id);
                }
            }
        }
        //Search and verify results
        SpatialArgs args = new SpatialArgs(operation, queryShape);
        if (queryShape is ShapePair)
        {
            args.DistErrPct = (0.0);//a hack; we want to be more detailed than gridSnap(queryShape)
        }
        Query query = strategy.MakeQuery(args);
        SearchResults got = executeQuery(query, 100);
        ISet<String> remainingExpectedIds = new /* LinkedHashSet<string>*/ HashSet<string>(expectedIds);
        foreach (SearchResult result in got.results)
        {
            String id = result.GetId();
            bool removed = remainingExpectedIds.remove(id);
            // A hit is acceptable if expected, or a known false-positive, or disjoint.
            if (!removed && (!opIsDisjoint && !secondaryIds.contains(id)))
            {
                fail("Shouldn't match", id, indexedShapes, indexedShapesGS, queryShape);
            }
        }
        if (opIsDisjoint)
        {
            // For disjoint, the secondary ids are allowed misses, not false positives.
            remainingExpectedIds.removeAll(secondaryIds);
        }
        if (remainingExpectedIds.Any())
        {
            var iter = remainingExpectedIds.GetEnumerator();
            iter.MoveNext();
            String id = iter.Current;
            fail("Should have matched", id, indexedShapes, indexedShapesGS, queryShape);
        }
    }
}
// Processes the insert/remove streams of a time-batch-style view: maintains the
// current batch map (event -> arrival timestamp), (re)schedules the expiry
// callback whenever the newest event or the batch contents change, and forwards
// the stream to child views.
public override void Update(EventBean[] newData, EventBean[] oldData)
{
    using (Instrument.With(
        i => i.QViewProcessIRStream(this, _factory.ViewName, newData, oldData),
        i => i.AViewProcessIRStream()))
    {
        if ((newData != null) && (newData.Length > 0))
        {
            // If we have an empty window about to be filled for the first time, add a callback
            bool removeSchedule = false;
            bool addSchedule = false;
            long timestamp = _agentInstanceContext.StatementContext.SchedulingService.Time;
            // if the window is already filled, then we may need to reschedule
            if (_currentBatch.IsNotEmpty())
            {
                // check if we need to reschedule
                long callbackTime = timestamp + _timeDeltaComputation.DeltaMillisecondsAdd(timestamp);
                if (callbackTime != _callbackScheduledTime)
                {
                    removeSchedule = true;
                    addSchedule = true;
                }
            }
            else
            {
                addSchedule = true;
            }
            if (removeSchedule)
            {
                _agentInstanceContext.StatementContext.SchedulingService.Remove(_handle, _scheduleSlot);
                // -1 marks "no callback scheduled".
                _callbackScheduledTime = -1;
            }
            if (addSchedule)
            {
                long msecIntervalSize = _timeDeltaComputation.DeltaMillisecondsAdd(timestamp);
                _agentInstanceContext.StatementContext.SchedulingService.Add(
                    msecIntervalSize, _handle, _scheduleSlot);
                _callbackScheduledTime = msecIntervalSize + timestamp;
            }
            // add data points to the window
            for (int i = 0; i < newData.Length; i++)
            {
                _currentBatch.Put(newData[i], timestamp);
                InternalHandleAdded(newData[i], timestamp);
                // Track the newest event; it drives the expiry schedule.
                _lastEvent = newData[i];
            }
        }
        if ((oldData != null) && (oldData.Length > 0))
        {
            bool removedLastEvent = false;
            foreach (EventBean anOldData in oldData)
            {
                _currentBatch.Remove(anOldData);
                InternalHandleRemoved(anOldData);
                if (anOldData == _lastEvent)
                {
                    removedLastEvent = true;
                }
            }
            // we may need to reschedule as the newest event may have been deleted
            if (_currentBatch.Count == 0)
            {
                // Batch now empty: cancel the pending callback entirely.
                _agentInstanceContext.StatementContext.SchedulingService.Remove(_handle, _scheduleSlot);
                _callbackScheduledTime = -1;
                _lastEvent = null;
            }
            else
            {
                // reschedule if the last event was removed
                if (removedLastEvent)
                {
                    // New "last event" is the final key in the insertion-ordered batch.
                    ICollection<EventBean> keyset = _currentBatch.Keys;
                    EventBean[] events = keyset.ToArray();
                    _lastEvent = events[events.Length - 1];
                    long lastTimestamp = _currentBatch.Get(_lastEvent);
                    // reschedule, newest event deleted
                    long timestamp = _agentInstanceContext.StatementContext.SchedulingService.Time;
                    long callbackTime = lastTimestamp + _timeDeltaComputation.DeltaMillisecondsAdd(lastTimestamp);
                    long deltaFromNow = callbackTime - timestamp;
                    if (callbackTime != _callbackScheduledTime)
                    {
                        _agentInstanceContext.StatementContext.SchedulingService.Remove(_handle, _scheduleSlot);
                        _agentInstanceContext.StatementContext.SchedulingService.Add(
                            deltaFromNow, _handle, _scheduleSlot);
                        _callbackScheduledTime = callbackTime;
                    }
                }
            }
        }
        // Update child views
        if (HasViews)
        {
            using (Instrument.With(
                i => i.QViewIndicate(this, _factory.ViewName, newData, oldData),
                i => i.AViewIndicate()))
            {
                UpdateChildren(newData, oldData);
            }
        }
    }
}
/// <summary>
/// Removing an entry while enumerating a LinkedHashMap must invalidate the live
/// enumerator, so the foreach is expected to raise
/// <see cref="InvalidOperationException"/> (same contract as Dictionary).
/// </summary>
public void GetEnumeratorModifyExceptionFromRemove()
{
    var map = new LinkedHashMap<string, Player>
    {
        ["123"] = new Player("123", "yyyyyyy")
    };

    Assert.Throws<InvalidOperationException>(
        () =>
        {
            // mutating the map mid-enumeration must blow up the iterator
            foreach (var entry in map)
            {
                map.Remove(entry.Key);
            }
        });
}
/// <summary>
/// Depth-first (post-order) compile pass over a pattern forge-node tree. Children are
/// compiled first, then this node is compiled according to its concrete type: filter
/// atoms get a FilterSpec built, observers/guards get their parameters validated and
/// bound, every-distinct nodes get their distinct expressions split from an optional
/// expiry time, match-until nodes get bounds validated and tags re-classified as
/// array tags, and followed-by nodes get their max expressions validated.
/// The shared <paramref name="tags"/> spec is mutated as tags are discovered.
/// NOTE(review): eventTypeReferences and isInsertInto are only threaded through the
/// recursion in this view of the code — their consumers are elsewhere.
/// </summary>
private static void RecursiveCompile(
    EvalForgeNode evalNode,
    ISet<string> eventTypeReferences,
    bool isInsertInto,
    MatchEventSpec tags,
    Stack<EvalForgeNode> parentNodeStack,
    ISet<string> allTagNamesOrdered,
    int streamNum,
    StatementRawInfo statementRawInfo,
    StatementCompileTimeServices services)
{
    // compile children first; the parent stack lets children inspect their ancestry
    parentNodeStack.Push(evalNode);
    foreach (var child in evalNode.ChildNodes)
    {
        RecursiveCompile(
            child,
            eventTypeReferences,
            isInsertInto,
            tags,
            parentNodeStack,
            allTagNamesOrdered,
            streamNum,
            statementRawInfo,
            services);
    }
    parentNodeStack.Pop();

    // tags newly introduced by this node; merged into "tags" at the very end
    IDictionary<string, Pair<EventType, string>> newTaggedEventTypes = null;
    IDictionary<string, Pair<EventType, string>> newArrayEventTypes = null;

    if (evalNode is EvalFilterForgeNode)
    {
        // --- filter atom: resolve the event type and build the filter spec ---
        var filterNode = (EvalFilterForgeNode) evalNode;
        var eventName = filterNode.RawFilterSpec.EventTypeName;
        if (services.TableCompileTimeResolver.Resolve(eventName) != null)
        {
            throw new ExprValidationException("Tables cannot be used in pattern filter atoms");
        }

        var resolvedEventType = ResolveTypeName(eventName, services.EventTypeCompileTimeResolver);
        var finalEventType = resolvedEventType;
        var optionalTag = filterNode.EventAsName;
        var isPropertyEvaluation = false;
        var isParentMatchUntil = IsParentMatchUntil(evalNode, parentNodeStack);

        // obtain property event type, if final event type is properties
        if (filterNode.RawFilterSpec.OptionalPropertyEvalSpec != null)
        {
            var optionalPropertyEvaluator = PropertyEvaluatorForgeFactory.MakeEvaluator(
                filterNode.RawFilterSpec.OptionalPropertyEvalSpec,
                resolvedEventType,
                filterNode.EventAsName,
                statementRawInfo,
                services);
            finalEventType = optionalPropertyEvaluator.FragmentEventType;
            isPropertyEvaluation = true;
        }

        // If a tag was supplied for the type, the tags must stay with this type,
        // i.e. a=BeanA -> b=BeanA -> a=BeanB is a no
        if (optionalTag != null)
        {
            var pair = tags.TaggedEventTypes.Get(optionalTag);
            EventType existingType = null;
            if (pair != null)
            {
                existingType = pair.First;
            }
            if (existingType == null)
            {
                // a tag may be a plain tag or an array tag, but never both
                pair = tags.ArrayEventTypes.Get(optionalTag);
                if (pair != null)
                {
                    throw new ExprValidationException(
                        "Tag '" + optionalTag + "' for event '" + eventName +
                        "' used in the repeat-until operator cannot also appear in other filter expressions");
                }
            }
            if (existingType != null && existingType != finalEventType)
            {
                throw new ExprValidationException(
                    "Tag '" + optionalTag + "' for event '" + eventName +
                    "' has already been declared for events of type " +
                    existingType.UnderlyingType.Name);
            }
            pair = new Pair<EventType, string>(finalEventType, eventName);

            // add tagged type; property-eval and match-until contexts produce array tags
            if (isPropertyEvaluation || isParentMatchUntil)
            {
                newArrayEventTypes = new LinkedHashMap<string, Pair<EventType, string>>();
                newArrayEventTypes.Put(optionalTag, pair);
            }
            else
            {
                newTaggedEventTypes = new LinkedHashMap<string, Pair<EventType, string>>();
                newTaggedEventTypes.Put(optionalTag, pair);
            }
        }

        // For this filter, filter types are all known tags at this time,
        // and additionally stream 0 (self) is our event type.
        // Stream type service allows resolution by property name event if that name appears in other tags.
        // by defaulting to stream zero.
        // Stream zero is always the current event type, all others follow the order of the map (stream 1 to N).
        var selfStreamName = optionalTag;
        if (selfStreamName == null)
        {
            // anonymous stream name for an untagged filter
            selfStreamName = "s_" + UuidGenerator.Generate();
        }
        var filterTypes = new LinkedHashMap<string, Pair<EventType, string>>();
        var typePair = new Pair<EventType, string>(finalEventType, eventName);
        filterTypes.Put(selfStreamName, typePair);
        filterTypes.PutAll(tags.TaggedEventTypes);

        // for the filter, specify all tags used
        var filterTaggedEventTypes = new LinkedHashMap<string, Pair<EventType, string>>(tags.TaggedEventTypes);
        filterTaggedEventTypes.Remove(optionalTag);

        // handle array tags (match-until clause): each array tag gets a synthetic
        // map event type registered so its properties resolve in filter expressions
        IDictionary<string, Pair<EventType, string>> arrayCompositeEventTypes = null;
        if (tags.ArrayEventTypes != null && !tags.ArrayEventTypes.IsEmpty())
        {
            arrayCompositeEventTypes = new LinkedHashMap<string, Pair<EventType, string>>();
            foreach (var entry in tags.ArrayEventTypes)
            {
                var specificArrayType = new LinkedHashMap<string, Pair<EventType, string>>();
                specificArrayType.Put(entry.Key, entry.Value);
                var eventTypeName = services.EventTypeNameGeneratorStatement.GetAnonymousPatternNameWTag(
                    streamNum, evalNode.FactoryNodeId, entry.Key);
                var mapProps = GetMapProperties(
                    Collections.GetEmptyMap<string, Pair<EventType, string>>(), specificArrayType);
                var metadata = new EventTypeMetadata(
                    eventTypeName,
                    statementRawInfo.ModuleName,
                    EventTypeTypeClass.PATTERNDERIVED,
                    EventTypeApplicationType.MAP,
                    NameAccessModifier.TRANSIENT,
                    EventTypeBusModifier.NONBUS,
                    false,
                    EventTypeIdPair.Unassigned());
                var mapEventType = BaseNestableEventUtil.MakeMapTypeCompileTime(
                    metadata,
                    mapProps,
                    null,
                    null,
                    null,
                    null,
                    services.BeanEventTypeFactoryPrivate,
                    services.EventTypeCompileTimeResolver);
                services.EventTypeCompileTimeRegistry.NewType(mapEventType);

                var tag = entry.Key;
                if (!filterTypes.ContainsKey(tag))
                {
                    var pair = new Pair<EventType, string>(mapEventType, tag);
                    filterTypes.Put(tag, pair);
                    arrayCompositeEventTypes.Put(tag, pair);
                }
            }
        }

        StreamTypeService streamTypeService = new StreamTypeServiceImpl(filterTypes, true, false);
        var exprNodes = filterNode.RawFilterSpec.FilterExpressions;
        var spec = FilterSpecCompiler.MakeFilterSpec(
            resolvedEventType,
            eventName,
            exprNodes,
            filterNode.RawFilterSpec.OptionalPropertyEvalSpec,
            filterTaggedEventTypes,
            arrayCompositeEventTypes,
            streamTypeService,
            null,
            statementRawInfo,
            services);
        filterNode.FilterSpec = spec;
    }
    else if (evalNode is EvalObserverForgeNode)
    {
        // --- observer (e.g. timer): validate its parameters against known tags ---
        var observerNode = (EvalObserverForgeNode) evalNode;
        try
        {
            var observerForge = services.PatternResolutionService.Create(observerNode.PatternObserverSpec);
            var streamTypeService = GetStreamTypeService(
                tags.TaggedEventTypes,
                tags.ArrayEventTypes,
                observerNode,
                streamNum,
                statementRawInfo,
                services);
            var validationContext = new ExprValidationContextBuilder(
                streamTypeService, statementRawInfo, services).Build();
            var validated = ValidateExpressions(
                ExprNodeOrigin.PATTERNOBSERVER,
                observerNode.PatternObserverSpec.ObjectParameters,
                validationContext);
            var convertor = new MatchedEventConvertorForge(
                tags.TaggedEventTypes, tags.ArrayEventTypes, allTagNamesOrdered);

            observerNode.ObserverFactory = observerForge;
            observerForge.SetObserverParameters(validated, convertor, validationContext);
        }
        catch (ObserverParameterException e)
        {
            throw new ExprValidationException(
                "Invalid parameter for pattern observer '" + observerNode.ToPrecedenceFreeEPL() + "': " + e.Message,
                e);
        }
        catch (PatternObjectException e)
        {
            throw new ExprValidationException(
                "Failed to resolve pattern observer '" + observerNode.ToPrecedenceFreeEPL() + "': " + e.Message,
                e);
        }
    }
    else if (evalNode is EvalGuardForgeNode)
    {
        // --- guard (e.g. timer:within): validate its parameters ---
        var guardNode = (EvalGuardForgeNode) evalNode;
        try
        {
            var guardForge = services.PatternResolutionService.Create(guardNode.PatternGuardSpec);
            var streamTypeService = GetStreamTypeService(
                tags.TaggedEventTypes,
                tags.ArrayEventTypes,
                guardNode,
                streamNum,
                statementRawInfo,
                services);
            var validationContext = new ExprValidationContextBuilder(
                streamTypeService, statementRawInfo, services).Build();
            var validated = ValidateExpressions(
                ExprNodeOrigin.PATTERNGUARD,
                guardNode.PatternGuardSpec.ObjectParameters,
                validationContext);
            var convertor = new MatchedEventConvertorForge(
                tags.TaggedEventTypes, tags.ArrayEventTypes, allTagNamesOrdered);

            guardNode.GuardForge = guardForge;
            guardForge.SetGuardParameters(validated, convertor, services);
        }
        catch (GuardParameterException e)
        {
            throw new ExprValidationException(
                "Invalid parameter for pattern guard '" + guardNode.ToPrecedenceFreeEPL() + "': " + e.Message,
                e);
        }
        catch (PatternObjectException e)
        {
            throw new ExprValidationException(
                "Failed to resolve pattern guard '" + guardNode.ToPrecedenceFreeEPL() + "': " + e.Message,
                e);
        }
    }
    else if (evalNode is EvalEveryDistinctForgeNode)
    {
        // --- every-distinct: split distinct-value expressions from an optional expiry ---
        var distinctNode = (EvalEveryDistinctForgeNode) evalNode;
        var matchEventFromChildNodes = AnalyzeMatchEvent(distinctNode);
        var streamTypeService = GetStreamTypeService(
            matchEventFromChildNodes.TaggedEventTypes,
            matchEventFromChildNodes.ArrayEventTypes,
            distinctNode,
            streamNum,
            statementRawInfo,
            services);
        var validationContext =
            new ExprValidationContextBuilder(streamTypeService, statementRawInfo, services).Build();
        IList<ExprNode> validated;
        try
        {
            validated = ValidateExpressions(
                ExprNodeOrigin.PATTERNEVERYDISTINCT,
                distinctNode.Expressions,
                validationContext);
        }
        catch (ExprValidationPropertyException ex)
        {
            // re-wrap with a hint specific to every-distinct property resolution
            throw new ExprValidationPropertyException(
                ex.Message +
                ", every-distinct requires that all properties resolve from sub-expressions to the every-distinct",
                ex.InnerException);
        }

        var convertor = new MatchedEventConvertorForge(
            matchEventFromChildNodes.TaggedEventTypes,
            matchEventFromChildNodes.ArrayEventTypes,
            allTagNamesOrdered);
        distinctNode.Convertor = convertor;

        // Determine whether some expressions are constants or time period
        IList<ExprNode> distinctExpressions = new List<ExprNode>();
        TimePeriodComputeForge timePeriodComputeForge = null;
        ExprNode expiryTimeExp = null;
        var count = -1;
        var last = validated.Count - 1;
        foreach (var expr in validated)
        {
            count++;
            if (count == last && expr is ExprTimePeriod)
            {
                // a trailing time-period expression is the expiry, not a distinct key
                expiryTimeExp = expr;
                var timePeriodExpr = (ExprTimePeriod) expiryTimeExp;
                timePeriodComputeForge = timePeriodExpr.TimePeriodComputeForge;
            }
            else if (expr.Forge.ForgeConstantType.IsCompileTimeConstant)
            {
                if (count == last)
                {
                    // a trailing numeric constant is interpreted as seconds-to-expire
                    var value = expr.Forge.ExprEvaluator.Evaluate(null, true, null);
                    if (!value.IsNumber())
                    {
                        throw new ExprValidationException(
                            "Invalid parameter for every-distinct, expected number of seconds constant (constant not considered for distinct)");
                    }

                    var secondsExpire = expr.Forge.ExprEvaluator.Evaluate(null, true, null);
                    var timeExpire = secondsExpire == null
                        ? (long?) null
                        : (long?) services.ImportServiceCompileTime.TimeAbacus.DeltaForSecondsNumber(secondsExpire);

                    if (timeExpire != null && timeExpire > 0)
                    {
                        timePeriodComputeForge = new TimePeriodComputeConstGivenDeltaForge(timeExpire.Value);
                        expiryTimeExp = expr;
                    }
                    else
                    {
                        Log.Warn(
                            "Invalid seconds-expire " + timeExpire + " for " +
                            ExprNodeUtilityPrint.ToExpressionStringMinPrecedenceSafe(expr));
                    }
                }
                else
                {
                    // non-trailing constants are dropped (useless as distinct keys)
                    Log.Warn(
                        "Every-distinct node utilizes an expression returning a constant value, please check expression '" +
                        ExprNodeUtilityPrint.ToExpressionStringMinPrecedenceSafe(expr) +
                        "', not adding expression to distinct-value expression list");
                }
            }
            else
            {
                distinctExpressions.Add(expr);
            }
        }
        if (distinctExpressions.IsEmpty())
        {
            throw new ExprValidationException(
                "Every-distinct node requires one or more distinct-value expressions that each return non-constant result values");
        }
        distinctNode.SetDistinctExpressions(distinctExpressions, timePeriodComputeForge, expiryTimeExp);
    }
    else if (evalNode is EvalMatchUntilForgeNode)
    {
        // --- match-until (repeat): validate bounds and promote tags to array tags ---
        var matchUntilNode = (EvalMatchUntilForgeNode) evalNode;

        // compile bounds expressions, if any
        var untilMatchEventSpec = new MatchEventSpec(tags.TaggedEventTypes, tags.ArrayEventTypes);
        var streamTypeService = GetStreamTypeService(
            untilMatchEventSpec.TaggedEventTypes,
            untilMatchEventSpec.ArrayEventTypes,
            matchUntilNode,
            streamNum,
            statementRawInfo,
            services);
        var validationContext =
            new ExprValidationContextBuilder(streamTypeService, statementRawInfo, services).Build();

        var lower = ValidateBounds(matchUntilNode.LowerBounds, validationContext);
        matchUntilNode.LowerBounds = lower;
        var upper = ValidateBounds(matchUntilNode.UpperBounds, validationContext);
        matchUntilNode.UpperBounds = upper;
        var single = ValidateBounds(matchUntilNode.SingleBound, validationContext);
        matchUntilNode.SingleBound = single;

        bool tightlyBound;
        if (matchUntilNode.SingleBound != null)
        {
            ValidateMatchUntil(matchUntilNode.SingleBound, matchUntilNode.SingleBound, false);
            tightlyBound = true;
        }
        else
        {
            var allowZeroLowerBounds = matchUntilNode.LowerBounds != null && matchUntilNode.UpperBounds != null;
            tightlyBound = ValidateMatchUntil(
                matchUntilNode.LowerBounds,
                matchUntilNode.UpperBounds,
                allowZeroLowerBounds);
        }
        if (matchUntilNode.SingleBound == null && !tightlyBound && matchUntilNode.ChildNodes.Count < 2)
        {
            throw new ExprValidationException("Variable bounds repeat operator requires an until-expression");
        }

        var convertor = new MatchedEventConvertorForge(
            untilMatchEventSpec.TaggedEventTypes,
            untilMatchEventSpec.ArrayEventTypes,
            allTagNamesOrdered);
        matchUntilNode.Convertor = convertor;

        // compile new tag lists: tags used inside the repeated sub-expression
        // become array tags (they collect multiple matches)
        ISet<string> arrayTags = null;
        var matchUntilAnalysisResult = EvalNodeUtil.RecursiveAnalyzeChildNodes(matchUntilNode.ChildNodes[0]);
        foreach (var filterNode in matchUntilAnalysisResult.FilterNodes)
        {
            var optionalTag = filterNode.EventAsName;
            if (optionalTag != null)
            {
                if (arrayTags == null)
                {
                    arrayTags = new HashSet<string>();
                }
                arrayTags.Add(optionalTag);
            }
        }

        if (arrayTags != null)
        {
            foreach (var arrayTag in arrayTags)
            {
                if (!tags.ArrayEventTypes.ContainsKey(arrayTag))
                {
                    // move the tag from the plain-tag map to the array-tag map
                    tags.ArrayEventTypes.Put(arrayTag, tags.TaggedEventTypes.Get(arrayTag));
                    tags.TaggedEventTypes.Remove(arrayTag);
                }
            }
        }
        matchUntilNode.TagsArrayedSet = GetIndexesForTags(allTagNamesOrdered, arrayTags);
    }
    else if (evalNode is EvalFollowedByForgeNode)
    {
        // --- followed-by: validate optional max-engine-instances expressions ---
        var followedByNode = (EvalFollowedByForgeNode) evalNode;
        StreamTypeService streamTypeService = new StreamTypeServiceImpl(false);
        var validationContext =
            new ExprValidationContextBuilder(streamTypeService, statementRawInfo, services).Build();

        if (followedByNode.OptionalMaxExpressions != null)
        {
            IList<ExprNode> validated = new List<ExprNode>();
            foreach (var maxExpr in followedByNode.OptionalMaxExpressions)
            {
                if (maxExpr == null)
                {
                    // positions without a max keep a null placeholder
                    validated.Add(null);
                }
                else
                {
                    // max expressions must be "plain" (no subselects/aggregation etc.,
                    // per the visitor's message) and must return a numeric value
                    var visitor = new ExprNodeSummaryVisitor();
                    maxExpr.Accept(visitor);
                    if (!visitor.IsPlain)
                    {
                        var errorMessage = "Invalid maximum expression in followed-by, " + visitor.Message +
                                           " are not allowed within the expression";
                        Log.Error(errorMessage);
                        throw new ExprValidationException(errorMessage);
                    }

                    var validatedExpr = ExprNodeUtilityValidate.GetValidatedSubtree(
                        ExprNodeOrigin.FOLLOWEDBYMAX, maxExpr, validationContext);
                    validated.Add(validatedExpr);
                    var returnType = validatedExpr.Forge.EvaluationType;
                    if (returnType == null || !returnType.IsNumeric())
                    {
                        var message =
                            "Invalid maximum expression in followed-by, the expression must return an integer value";
                        throw new ExprValidationException(message);
                    }
                }
            }
            followedByNode.OptionalMaxExpressions = validated;
        }
    }

    // merge tags discovered at this node into the shared spec
    if (newTaggedEventTypes != null)
    {
        tags.TaggedEventTypes.PutAll(newTaggedEventTypes);
    }
    if (newArrayEventTypes != null)
    {
        tags.ArrayEventTypes.PutAll(newArrayEventTypes);
    }
}
/// <summary>
/// Randomized round-trip test for a spatial predicate: indexes a random mix of
/// shapes (nulls, points, rects, shape-pairs), randomly deletes some, then for many
/// random query shapes computes the expected matches by brute force
/// (operation.Evaluate on the raw shapes, with grid-snapped retries for leniency)
/// and verifies the strategy's query returns every true-positive and nothing
/// outside the allowed false-positive set.
/// </summary>
/// <param name="operation">the spatial predicate under test</param>
private void doTest(SpatialOperation operation)
{
    //first show that when there's no data, a query will result in no results
    {
        Query query = strategy.MakeQuery(new SpatialArgs(operation, randomRectangle()));
        SearchResults searchResults = executeQuery(query, 1);
        assertEquals(0, searchResults.numFound);
    }

    bool biasContains = (operation == SpatialOperation.Contains);

    //Main index loop:
    IDictionary<String, IShape> indexedShapes = new LinkedHashMap<String, IShape>();
    IDictionary<String, IShape> indexedShapesGS = new LinkedHashMap<String, IShape>();//grid snapped
    int numIndexedShapes = randomIntBetween(1, 6);
    bool indexedAtLeastOneShapePair = false;
    for (int i = 0; i < numIndexedShapes; i++)
    {
        String id = "" + i;
        IShape indexedShape;
        int R = Random().nextInt(12);
        if (R == 0)
        {//1 in 12: document with no shape at all
            indexedShape = null;
        }
        else if (R == 1)
        {//1 in 12
            indexedShape = randomPoint();//just one point
        }
        else if (R <= 4)
        {//3 in 12
            //comprised of more than one shape
            indexedShape = randomShapePairRect(biasContains);
            indexedAtLeastOneShapePair = true;
        }
        else
        {
            indexedShape = randomRectangle();//just one rect
        }
        indexedShapes.Put(id, indexedShape);
        indexedShapesGS.Put(id, gridSnap(indexedShape));

        adoc(id, indexedShape);

        if (Random().nextInt(10) == 0)
            Commit();//intermediate commit, produces extra segments
    }

    //delete some documents randomly
    // (iterates a snapshot of the keys so the maps can be mutated safely)
    IEnumerator<String> idIter = indexedShapes.Keys.ToList().GetEnumerator();
    while (idIter.MoveNext())
    {
        String id = idIter.Current;
        if (Random().nextInt(10) == 0)
        {
            DeleteDoc(id);
            //idIter.Remove();
            indexedShapes.Remove(id); // LUCENENET TODO: Verify this works.
            indexedShapesGS.Remove(id);
        }
    }

    Commit();

    //Main query loop:
    int numQueryShapes = AtLeast(20);
    for (int i = 0; i < numQueryShapes; i++)
    {
        int scanLevel = randomInt(grid.MaxLevels);
        ((RecursivePrefixTreeStrategy)strategy).PrefixGridScanLevel = (scanLevel);

        IShape queryShape;
        switch (randomInt(10))
        {
            case 0: queryShape = randomPoint(); break;
// LUCENE-5549
//TODO debug: -Dtests.method=testWithin -Dtests.multiplier=3 -Dtests.seed=5F5294CE2E075A3E:AAD2F0F79288CA64
//            case 1:case 2:case 3:
//                if (!indexedAtLeastOneShapePair) { // avoids ShapePair.relate(ShapePair), which isn't reliable
//                    queryShape = randomShapePairRect(!biasContains);//invert biasContains for query side
//                    break;
//                }
            default: queryShape = randomRectangle(); break;
        }
        IShape queryShapeGS = gridSnap(queryShape);

        bool opIsDisjoint = operation == SpatialOperation.IsDisjointTo;

        //Generate truth via brute force:
        // We ensure true-positive matches (if the predicate on the raw shapes match
        // then the search should find those same matches).
        // approximations, false-positive matches
        ISet<String> expectedIds = new /* LinkedHashSet<string>*/ HashSet<string>();//true-positives
        ISet<String> secondaryIds = new /* LinkedHashSet<string>*/ HashSet<string>();//false-positives (unless disjoint)
        foreach (var entry in indexedShapes)
        {
            String id = entry.Key;
            IShape indexedShapeCompare = entry.Value;
            if (indexedShapeCompare == null)
                continue;//null shapes match nothing
            IShape queryShapeCompare = queryShape;

            if (operation.Evaluate(indexedShapeCompare, queryShapeCompare))
            {
                expectedIds.add(id);
                if (opIsDisjoint)
                {
                    //if no longer intersect after buffering them, for disjoint, remember this
                    indexedShapeCompare = indexedShapesGS[id];
                    queryShapeCompare = queryShapeGS;
                    if (!operation.Evaluate(indexedShapeCompare, queryShapeCompare))
                        secondaryIds.add(id);
                }
            }
            else if (!opIsDisjoint)
            {
                //buffer either the indexed or query shape (via gridSnap) and try again
                if (operation == SpatialOperation.Intersects)
                {
                    indexedShapeCompare = indexedShapesGS[id];
                    queryShapeCompare = queryShapeGS;
                    //TODO Unfortunately, grid-snapping both can result in intersections that otherwise
                    // wouldn't happen when the grids are adjacent. Not a big deal but our test is just a
                    // bit more lenient.
                }
                else if (operation == SpatialOperation.Contains)
                {
                    indexedShapeCompare = indexedShapesGS[id];
                }
                else if (operation == SpatialOperation.IsWithin)
                {
                    queryShapeCompare = queryShapeGS;
                }
                if (operation.Evaluate(indexedShapeCompare, queryShapeCompare))
                    secondaryIds.add(id);
            }
        }

        //Search and verify results
        SpatialArgs args = new SpatialArgs(operation, queryShape);
        if (queryShape is ShapePair)
            args.DistErrPct = (0.0);//a hack; we want to be more detailed than gridSnap(queryShape)
        Query query = strategy.MakeQuery(args);
        SearchResults got = executeQuery(query, 100);
        ISet<String> remainingExpectedIds = new /* LinkedHashSet<string>*/ HashSet<string>(expectedIds);
        foreach (SearchResult result in got.results)
        {
            String id = result.GetId();
            // every hit must be an expected id or an allowed false-positive
            bool removed = remainingExpectedIds.remove(id);
            if (!removed && (!opIsDisjoint && !secondaryIds.contains(id)))
            {
                fail("Shouldn't match", id, indexedShapes, indexedShapesGS, queryShape);
            }
        }
        if (opIsDisjoint)
            remainingExpectedIds.removeAll(secondaryIds);
        if (remainingExpectedIds.Any())
        {
            // at least one true-positive was not returned by the search
            var iter = remainingExpectedIds.GetEnumerator();
            iter.MoveNext();
            String id = iter.Current;
            fail("Should have matched", id, indexedShapes, indexedShapesGS, queryShape);
        }
    }
}
/// <summary>
/// Removes the attribute stored under the data-attribute form of
/// <paramref name="key"/> (as produced by <c>Attributes.DataKey</c>) from the
/// enclosing attribute set.
/// </summary>
/// <param name="key">the plain key; translated via <c>Attributes.DataKey</c> before removal</param>
/// <returns>true when a matching attribute existed and was removed</returns>
public bool Remove(string key) => enclosingAttributes.Remove(Attributes.DataKey(key));
/// <summary>
/// Insert/remove-stream entry point for this batch view (audited variant). New events
/// are stored in _currentBatch with the scheduler time of arrival, and a scheduling
/// callback is (re)armed one time-delta after the newest event; every schedule
/// add/remove is also reported to the audit provider. Removing events may force a
/// reschedule keyed to the newest remaining event, or cancel the schedule entirely
/// when the batch empties.
/// NOTE(review): the behavior when the scheduled callback fires lives in the _handle
/// callback outside this block.
/// </summary>
/// <param name="newData">events entering the view; may be null or empty</param>
/// <param name="oldData">events leaving the view; may be null or empty</param>
public override void Update(
    EventBean[] newData,
    EventBean[] oldData)
{
    _agentInstanceContext.AuditProvider.View(newData, oldData, _agentInstanceContext, _factory);
    _agentInstanceContext.InstrumentationProvider.QViewProcessIRStream(_factory, newData, oldData);

    if ((newData != null) && (newData.Length > 0))
    {
        // If we have an empty window about to be filled for the first time, add a callback
        bool removeSchedule = false;
        bool addSchedule = false;
        long timestamp = _agentInstanceContext.StatementContext.SchedulingService.Time;

        // if the window is already filled, then we may need to reschedule
        if (!_currentBatch.IsEmpty())
        {
            // check if we need to reschedule: callback must fire one delta after "now"
            long callbackTime = timestamp + _timePeriodProvide.DeltaAdd(timestamp, null, true, _agentInstanceContext);
            if (callbackTime != _callbackScheduledTime)
            {
                removeSchedule = true;
                addSchedule = true;
            }
        }
        else
        {
            addSchedule = true;
        }

        if (removeSchedule)
        {
            _agentInstanceContext.AuditProvider.ScheduleRemove(
                _agentInstanceContext, _handle, ScheduleObjectType.view, _factory.ViewName);
            _agentInstanceContext.StatementContext.SchedulingService.Remove(_handle, _scheduleSlot);
            _callbackScheduledTime = -1; // -1 marks "nothing scheduled"
        }
        if (addSchedule)
        {
            long timeIntervalSize = _timePeriodProvide.DeltaAdd(timestamp, null, true, _agentInstanceContext);
            _agentInstanceContext.AuditProvider.ScheduleAdd(
                timeIntervalSize, _agentInstanceContext, _handle, ScheduleObjectType.view, _factory.ViewName);
            _agentInstanceContext.StatementContext.SchedulingService.Add(
                timeIntervalSize, _handle, _scheduleSlot);
            _callbackScheduledTime = timeIntervalSize + timestamp;
        }

        // add data points to the window; _lastEvent tracks the newest event added
        for (int i = 0; i < newData.Length; i++)
        {
            _currentBatch.Put(newData[i], timestamp);
            _lastEvent = newData[i];
        }
    }

    if ((oldData != null) && (oldData.Length > 0))
    {
        bool removedLastEvent = false;
        foreach (EventBean anOldData in oldData)
        {
            _currentBatch.Remove(anOldData);
            if (anOldData == _lastEvent)
            {
                removedLastEvent = true;
            }
        }

        // we may need to reschedule as the newest event may have been deleted
        if (_currentBatch.Count == 0)
        {
            // batch emptied entirely: cancel the pending callback
            _agentInstanceContext.AuditProvider.ScheduleRemove(
                _agentInstanceContext, _handle, ScheduleObjectType.view, _factory.ViewName);
            _agentInstanceContext.StatementContext.SchedulingService.Remove(_handle, _scheduleSlot);
            _callbackScheduledTime = -1;
            _lastEvent = null;
        }
        else
        {
            // reschedule if the last event was removed
            if (removedLastEvent)
            {
                // the newest remaining event is the last key in insertion order
                EventBean[] events = _currentBatch.Keys.ToArray();
                _lastEvent = events[events.Length - 1];
                long lastTimestamp = _currentBatch.Get(_lastEvent);

                // reschedule, newest event deleted
                long timestamp = _agentInstanceContext.StatementContext.SchedulingService.Time;
                long callbackTime = lastTimestamp + _timePeriodProvide.DeltaAdd(
                    lastTimestamp, null, true, _agentInstanceContext);
                long deltaFromNow = callbackTime - timestamp;

                if (callbackTime != _callbackScheduledTime)
                {
                    _agentInstanceContext.AuditProvider.ScheduleRemove(
                        _agentInstanceContext, _handle, ScheduleObjectType.view, _factory.ViewName);
                    _agentInstanceContext.StatementContext.SchedulingService.Remove(_handle, _scheduleSlot);
                    _agentInstanceContext.AuditProvider.ScheduleAdd(
                        deltaFromNow, _agentInstanceContext, _handle, ScheduleObjectType.view, _factory.ViewName);
                    _agentInstanceContext.StatementContext.SchedulingService.Add(
                        deltaFromNow, _handle, _scheduleSlot);
                    _callbackScheduledTime = callbackTime;
                }
            }
        }
    }

    // update child views
    var child = Child;
    if (child != null)
    {
        _agentInstanceContext.InstrumentationProvider.QViewIndicate(_factory, newData, oldData);
        child.Update(newData, oldData);
        _agentInstanceContext.InstrumentationProvider.AViewIndicate();
    }

    _agentInstanceContext.InstrumentationProvider.AViewProcessIRStream();
}
/// <summary>
/// Removes the given arc from the set of successor arcs.
/// </summary>
/// <param name="arc">the arc to remove</param>
void DeleteSuccessor(SentenceHMMStateArc arc)
{
    arcs.Remove(arc);
}