/// <summary>
/// Returns the property sheet for the given object instance, constructing and
/// caching it on first access from the registered raw property data.
/// </summary>
/// <param name="instanceName">name of the configured component instance</param>
/// <returns>the cached or newly constructed property sheet; null when the name
/// is unknown and no raw property data was registered for it</returns>
public PropertySheet GetPropertySheet(String instanceName)
{
    if (!_symbolTable.ContainsKey(instanceName))
    {
        // Not in the symbol table yet - construct it lazily from the raw
        // property data, if any exists. TryGetValue avoids the original
        // ContainsKey + indexer double lookup.
        RawPropertyData rpd;
        if (_rawPropertyMap.TryGetValue(instanceName, out rpd) && rpd != null)
        {
            var className = rpd.ClassName;
            try
            {
                // Resolve the configured class; Type.GetType(..., true)
                // throws when the type cannot be found.
                var propertySheet = new PropertySheet(Type.GetType(className, true), instanceName, this, rpd);
                _symbolTable.Put(instanceName, propertySheet);
            }
            catch (Exception)
            {
                Trace.Fail(string.Format("Class '{0}' not found in Assembly '{1}'", className, Assembly.GetCallingAssembly()));
                throw; // rethrow preserving the original stack trace
            }
        }
    }
    return _symbolTable.Get(instanceName);
}
public void SimpleOperationsShouldBehaveAsInDictionary()
{
    // Insertion order is 1, 3, 2 - basic add/lookup/remove must behave
    // exactly as an ordinary dictionary would.
    var map = new LinkedHashMap<int, string>
    {
        { 1, "One" },
        { 3, "Three" },
        { 2, "Two" }
    };

    Assert.AreEqual(3, map.Count);
    Assert.AreEqual(3, map.Keys.Count);
    Assert.AreEqual(3, map.Values.Count);

    // Membership for present and absent keys.
    Assert.True(map.ContainsKey(1));
    Assert.True(map.ContainsKey(2));
    Assert.True(map.ContainsKey(3));
    Assert.True(!map.ContainsKey(4));

    // Removing a missing key reports false; removing present keys shrinks the map.
    Assert.False(map.Remove(4));
    Assert.True(map.Remove(2));
    Assert.AreEqual(2, map.Count);
    Assert.True(map.Remove(1));
    Assert.AreEqual(1, map.Count);
    Assert.AreEqual("Three", map[3]);

    // Indexer assignment re-adds a previously removed key.
    map[1] = "OneAgain";
    Assert.AreEqual(2, map.Count);
    Assert.AreEqual("OneAgain", map[1]);
    Assert.AreEqual("Three", map[3]);
}
public Object EvaluateEnumMethod(EventBean[] eventsLambda, ICollection<object> target, bool isNewData, ExprEvaluatorContext context)
{
    // Zero or one event is trivially distinct - return the input unchanged.
    var beans = (ICollection<EventBean>) target;
    if (beans.IsEmpty() || beans.Count == 1)
    {
        return beans;
    }

    var evaluateParams = new EvaluateParams(eventsLambda, isNewData, context);

    // Keep the first event seen per key, preserving encounter order.
    var seen = new LinkedHashMap<IComparable, EventBean>();
    foreach (var bean in beans)
    {
        eventsLambda[StreamNumLambda] = bean;
        var key = (IComparable) InnerExpression.Evaluate(evaluateParams);
        if (!seen.ContainsKey(key))
        {
            seen.Put(key, bean);
        }
    }
    return seen.Values;
}
public Object EvaluateEnumMethod(EventBean[] eventsLambda, ICollection<object> target, bool isNewData, ExprEvaluatorContext context)
{
    // De-duplication is a no-op for null, empty or single-element input.
    if (target == null || target.Count < 2)
    {
        return target;
    }

    var evaluateParams = new EvaluateParams(eventsLambda, isNewData, context);
    var seen = new LinkedHashMap<IComparable, Object>();

    // A single reusable one-column event carries each scalar into the lambda.
    var wrapper = new ObjectArrayEventBean(new Object[1], _resultEventType);

    foreach (Object item in target)
    {
        wrapper.Properties[0] = item;
        eventsLambda[StreamNumLambda] = wrapper;
        var key = (IComparable) InnerExpression.Evaluate(evaluateParams);
        if (!seen.ContainsKey(key))
        {
            seen.Put(key, item);
        }
    }
    return seen.Values;
}
/// <summary>
/// Builds an ordered name-value map from an alternating sequence of
/// string keys and arbitrary values.
/// </summary>
/// <param name="values">alternating key (string) and value (object) entries</param>
/// <returns>map preserving the order in which keys were supplied</returns>
/// <exception cref="ArgumentException">
/// when the argument count is odd, a key is not a string, or a key occurs twice
/// </exception>
public static IDictionary<string, object> PopulateNameValueMap(params object[] values)
{
    var result = new LinkedHashMap<string, object>();
    var count = values.Length / 2;

    // Name-value pairs require an even argument count.
    // (Fixed message typo: "event number" -> "even number".)
    if (values.Length != count * 2)
    {
        throw new ArgumentException("Expected an even number of name-value pairs");
    }

    for (int i = 0; i < count; i++)
    {
        var index = i * 2;
        var keyValue = values[index];
        if (!(keyValue is string))
        {
            throw new ArgumentException("Expected string-type key value at index " + index + " but found " + keyValue);
        }

        var key = (string) keyValue;
        var value = values[index + 1];
        if (result.ContainsKey(key))
        {
            throw new ArgumentException("Found two or more values for key '" + key + "'");
        }
        result[key] = value;
    }
    return result;
}
internal virtual ICollection<string> GetNames(string section, string subsection)
{
    IList<ConfigLine> sorted = Sorted();

    // Find() encodes a missing exact match as -(insertionPoint + 1);
    // decode it to the first candidate index.
    int pos = Find(sorted, section, subsection, string.Empty);
    if (pos < 0)
    {
        pos = -(pos + 1);
    }

    // Collect names case-insensitively, remembering the first-seen original casing.
    IDictionary<string, string> names = new LinkedHashMap<string, string>();
    while (pos < sorted.Count)
    {
        ConfigLine line = sorted[pos++];
        if (!line.Match(section, subsection))
        {
            break;
        }
        if (line.name == null)
        {
            continue;
        }
        string folded = StringUtils.ToLowerCase(line.name);
        if (!names.ContainsKey(folded))
        {
            names.Put(folded, line.name);
        }
    }
    return new ConfigSnapshot.CaseFoldingSet(names);
}
public object EvaluateEnumMethod(EventBean[] eventsLambda, ICollection<object> enumcoll, bool isNewData, ExprEvaluatorContext context)
{
    // Zero or one element is trivially distinct.
    if (enumcoll.Count <= 1)
    {
        return enumcoll;
    }

    IDictionary<IComparable, object> seen = new LinkedHashMap<IComparable, object>();

    // One reusable single-column event carries each scalar into the lambda stream.
    var wrapper = new ObjectArrayEventBean(new object[1], _forge.resultEventType);
    eventsLambda[_forge.StreamNumLambda] = wrapper;
    var columns = wrapper.Properties;

    foreach (var item in enumcoll)
    {
        columns[0] = item;
        var key = (IComparable) _innerExpression.Evaluate(eventsLambda, isNewData, context);
        if (!seen.ContainsKey(key))
        {
            seen.Put(key, item);
        }
    }
    return seen.Values;
}
private static StreamTypeService GetStreamTypeService(string engineURI, int statementId, EventAdapterService eventAdapterService, IDictionary<string, Pair<EventType, string>> taggedEventTypes, IDictionary<string, Pair<EventType, string>> arrayEventTypes, IEnumerable<int> subexpressionIdStack, string objectType, StatementContext statementContext)
{
    // The plain tagged filter types form the base set.
    var filterTypes = new LinkedHashMap<string, Pair<EventType, string>>();
    filterTypes.PutAll(taggedEventTypes);

    // Array tags (match-until clause) are represented by a composite
    // semi-anonymous map type registered for this statement.
    if (arrayEventTypes != null)
    {
        var patternSubexEventType = GetPatternSubexEventType(statementId, objectType, subexpressionIdStack);
        var compositeType = eventAdapterService.CreateSemiAnonymousMapType(patternSubexEventType, new Dictionary<string, Pair<EventType, string>>(), arrayEventTypes, false);
        statementContext.StatementSemiAnonymousTypeRegistry.Register(compositeType);

        foreach (var entry in arrayEventTypes)
        {
            var tag = entry.Key;
            // Tagged event types take precedence on name collisions.
            if (!filterTypes.ContainsKey(tag))
            {
                filterTypes.Put(tag, new Pair<EventType, string>(compositeType, tag));
            }
        }
    }
    return new StreamTypeServiceImpl(filterTypes, engineURI, true, false);
}
/// <summary>
/// Validates the new-keyword structure node: rejects duplicate property
/// names, derives each column's type (row properties when the child forge
/// can describe them, otherwise the boxed evaluation type) and builds the forge.
/// </summary>
/// <param name="validationContext">validation context (unused here)</param>
/// <returns>null - the node is not rewritten</returns>
/// <exception cref="ExprValidationException">when a property name repeats</exception>
public override ExprNode Validate(ExprValidationContext validationContext)
{
    var eventType = new LinkedHashMap<string, object>();

    // Fix: the accumulator must start at true. Starting at false made the
    // AND-fold below permanently false, so the forge could never be marked
    // as a compile-time constant regardless of its children.
    var isAllConstants = true;

    for (var i = 0; i < ColumnNames.Length; i++)
    {
        isAllConstants = isAllConstants && ChildNodes[i].Forge.ForgeConstantType.IsCompileTimeConstant;

        if (eventType.ContainsKey(ColumnNames[i]))
        {
            throw new ExprValidationException("Failed to validate new-keyword property names, property '" + ColumnNames[i] + "' has already been declared");
        }

        // Prefer the row-type description when the child can produce one.
        IDictionary<string, object> eventTypeResult = null;
        if (ChildNodes[i].Forge is ExprTypableReturnForge)
        {
            eventTypeResult = ((ExprTypableReturnForge) ChildNodes[i].Forge).RowProperties;
        }
        if (eventTypeResult != null)
        {
            eventType.Put(ColumnNames[i], eventTypeResult);
        }
        else
        {
            // Fall back to the (boxed) evaluation type of the child.
            var classResult = ChildNodes[i].Forge.EvaluationType.GetBoxedType();
            eventType.Put(ColumnNames[i], classResult);
        }
    }

    _forge = new ExprNewStructNodeForge(this, isAllConstants, eventType);
    return null;
}
public override ExprNode Validate(ExprValidationContext validationContext)
{
    _eventType = new LinkedHashMap<string, Object>();
    _evaluators = ExprNodeUtility.GetEvaluators(this.ChildNodes);

    for (var i = 0; i < _columnNames.Length; i++)
    {
        // Fold constant-ness across all child expressions.
        _isAllConstants = _isAllConstants && this.ChildNodes[i].IsConstantResult;

        // Duplicate property names are a validation error.
        if (_eventType.ContainsKey(_columnNames[i]))
        {
            throw new ExprValidationException("Failed to validate new-keyword property names, property '" + _columnNames[i] + "' has already been declared");
        }

        // Use the row-type description when the evaluator can supply one,
        // otherwise the boxed return type of the evaluator.
        IDictionary<string, Object> rowProps = null;
        if (_evaluators[i] is ExprEvaluatorTypableReturn)
        {
            rowProps = ((ExprEvaluatorTypableReturn) _evaluators[i]).RowProperties;
        }

        if (rowProps != null)
        {
            _eventType.Put(_columnNames[i], rowProps);
        }
        else
        {
            _eventType.Put(_columnNames[i], _evaluators[i].ReturnType.GetBoxedType());
        }
    }
    return null;
}
private void AddColumnWithValueOrType(string columnName, object valueOrType)
{
    // Each column may be added only once per builder.
    if (!columns.ContainsKey(columnName))
    {
        columns.Add(columnName, valueOrType);
        return;
    }
    throw new ArgumentException($"The column '{columnName}' has already been added in this SQL builder", nameof(columnName));
}
/// <summary>
/// Registers a symbol in this scope, linking it back to the scope and
/// recording its zero-based insertion order.
/// </summary>
/// <param name="sym">symbol to define</param>
/// <exception cref="ArgumentException">when a symbol of the same name already exists</exception>
public virtual void Define(ISymbol sym)
{
    var name = sym.GetName();
    if (symbols.ContainsKey(name))
    {
        throw new ArgumentException("duplicate symbol " + name);
    }

    sym.SetScope(this);
    // Insertion order equals the number of symbols defined so far.
    sym.SetInsertionOrderNumber(symbols.Count);
    symbols.Add(name, sym);
}
// Executes this lookup action: lazily parses the "keys" spec into a
// key->value cache, then either iterates every cached entry (when no
// single "key" is configured) or resolves one key into temporary
// "key"/"value"/"hasKey" parameters around the nested action.
public override void DoAction(IEventArgs args)
{
    // Rebuild the cache when absent, or whenever the spec contains variable
    // placeholders ("{"/"}") and must be re-expanded for each event.
    if (cache == null || keys.Contains("{") || keys.Contains("}"))
    {
        cache = new LinkedHashMap <string, string>();
        // Split entries on the listed comma separators.
        // NOTE(review): two separator entries are given - presumably ASCII
        // and full-width comma; confirm the second literal's encoding.
        string[] ks = StringUtil.Split(FreeUtil.ReplaceVar(keys, args), new string[] { ",", "," });
        foreach (string k in ks)
        {
            // Each entry is "name=value"; a bare "name" maps to itself.
            string[] vs = StringUtil.Split(k, "=");
            if (vs.Length == 2)
            {
                cache[vs[0].Trim()] = vs[1].Trim();
            }
            else
            {
                if (vs.Length == 1)
                {
                    cache[vs[0].Trim()] = vs[0].Trim();
                }
            }
        }
    }
    if (StringUtil.IsNullOrEmpty(key))
    {
        // No specific key configured: hand every cached pair to HandleOne
        // with its 1-based position and the total entry count.
        int i = 0;
        foreach (string k in cache.Keys)
        {
            HandleOne(cache.Count, i + 1, k, cache[k], args);
            i++;
        }
    }
    else
    {
        // Resolve the single configured key after variable expansion.
        string k = FreeUtil.ReplaceVar(key, args);
        if (cache.ContainsKey(k) || useKey)
        {
            // NOTE(review): when useKey is set and k is absent, this relies
            // on the indexer yielding null rather than throwing - confirm
            // the LinkedHashMap indexer's missing-key behavior.
            string v = cache[k];
            args.GetDefault().GetParameters().TempUse(new BoolPara("hasKey", v != null));
            if (v == null)
            {
                // Fall back to the raw (unexpanded) key spec as the value.
                v = key;
            }
            args.GetDefault().GetParameters().TempUse(new StringPara("key", k));
            args.GetDefault().GetParameters().TempUse(new StringPara("value", v));
            if (action != null)
            {
                action.Act(args);
            }
            // Restore the parameters after the nested action has run.
            args.GetDefault().GetParameters().Resume("key");
            args.GetDefault().GetParameters().Resume("value");
            args.GetDefault().GetParameters().Resume("hasKey");
        }
    }
}
// Verifies, for every expression/listener pair in the case list, that the
// listener received exactly the events expected for the given event id,
// logging a detailed diff before failing the assertion on mismatch.
private void CheckResults(PatternTestStyle testStyle, String eventId)
{
    // For each test descriptor, make sure the listener has received exactly the events expected
    int index = 0;
    Log.Debug("CheckResults: Checking results for event " + eventId);
    foreach (EventExpressionCase descriptor in _caseList.Results)
    {
        // _expressions and _listeners are parallel to the descriptor list.
        String expressionText = _expressions[index].Text;
        LinkedHashMap <String, LinkedList <EventDescriptor> > allExpectedResults = descriptor.ExpectedResults;
        EventBean[] receivedResults = _listeners[index].LastNewData;
        index++;

        // If nothing at all was expected for this event, make sure nothing was received
        if (!(allExpectedResults.ContainsKey(eventId)))
        {
            if ((receivedResults != null) && (receivedResults.Length > 0))
            {
                Log.Debug("CheckResults: Incorrect result for style " + testStyle + " expression : " + expressionText);
                Log.Debug("CheckResults: Expected no results for event " + eventId + ", but received " + receivedResults.Length + " events");
                Log.Debug("CheckResults: Received, have " + receivedResults.Length + " entries");
                PrintList(receivedResults);
                Assert.Fail();
            }
            continue;
        }

        LinkedList <EventDescriptor> expectedResults = allExpectedResults.Get(eventId);

        // Compare the result lists, not caring about the order of the elements
        try
        {
            if (!(CompareLists(receivedResults, expectedResults)))
            {
                // Mismatch: log both sides fully before failing.
                Log.Debug("CheckResults: Incorrect result for style " + testStyle + " expression : " + expressionText);
                Log.Debug("CheckResults: Expected size=" + expectedResults.Count + " received size=" + (receivedResults == null ? 0 : receivedResults.Length));
                Log.Debug("CheckResults: Expected, have " + expectedResults.Count + " entries");
                PrintList(expectedResults);
                Log.Debug("CheckResults: Received, have " + (receivedResults == null ? 0 : receivedResults.Length) + " entries");
                PrintList(receivedResults);
                Assert.Fail();
            }
        }
        catch (Exception ex)
        {
            // CompareLists may throw; surface the statement text so the
            // failure is diagnosable from the test output.
            Console.WriteLine(ex.StackTrace);
            Assert.Fail("For statement '" + expressionText + "' failed to assert: " + ex.Message);
        }
    }
}
private static StreamTypeService GetStreamTypeService(IDictionary<string, Pair<EventType, string>> taggedEventTypes, IDictionary<string, Pair<EventType, string>> arrayEventTypes, EvalForgeNode forge, int streamNum, StatementRawInfo statementRawInfo, StatementCompileTimeServices services)
{
    // Plain tagged types form the base set of filter types.
    var filterTypes = new LinkedHashMap<string, Pair<EventType, string>>();
    filterTypes.PutAll(taggedEventTypes);

    // Match-until array tags are represented by an anonymous, transient
    // pattern-derived map event type registered at compile time.
    if (arrayEventTypes != null)
    {
        var eventTypeName = services.EventTypeNameGeneratorStatement.GetAnonymousPatternName(streamNum, forge.FactoryNodeId);
        var metadata = new EventTypeMetadata(eventTypeName, statementRawInfo.ModuleName, EventTypeTypeClass.PATTERNDERIVED, EventTypeApplicationType.MAP, NameAccessModifier.TRANSIENT, EventTypeBusModifier.NONBUS, false, EventTypeIdPair.Unassigned());
        var mapProperties = GetMapProperties(new Dictionary<string, Pair<EventType, string>>(), arrayEventTypes);
        var mapEventType = BaseNestableEventUtil.MakeMapTypeCompileTime(metadata, mapProperties, null, null, null, null, services.BeanEventTypeFactoryPrivate, services.EventTypeCompileTimeResolver);
        services.EventTypeCompileTimeRegistry.NewType(mapEventType);

        foreach (var entry in arrayEventTypes)
        {
            var tag = entry.Key;
            // Tagged types win over array tags on name collisions.
            if (!filterTypes.ContainsKey(tag))
            {
                filterTypes.Put(tag, new Pair<EventType, string>(mapEventType, tag));
            }
        }
    }
    return new StreamTypeServiceImpl(filterTypes, true, false);
}
/// <summary>
/// Resolves a color string to a Color: known WCA color names take
/// precedence, otherwise the string is parsed as a color specification.
/// </summary>
/// <param name="s">color name or specification string</param>
/// <returns>the resolved color, or null when the string cannot be parsed</returns>
public static Color toColor(string s)
{
    // Single lookup instead of the original ContainsKey + indexer pair.
    Color known;
    if (WCA_COLORS.TryGetValue(s, out known))
    {
        return known;
    }
    try
    {
        return new Color(s);
    }
    catch //(Exception e)
    {
        // Unparsable specification - deliberately signal "no color"
        // rather than propagate the parse failure.
        return null;
    }
}
// Counts how many test case descriptors expect at least one result
// for the given event id.
private int CountExpectedEvents(String eventId)
{
    int expectedCount = 0;
    foreach (EventExpressionCase testCase in _caseList.Results)
    {
        // A descriptor contributes when its expected-results map lists this event id.
        if (testCase.ExpectedResults.ContainsKey(eventId))
        {
            expectedCount++;
        }
    }
    return expectedCount;
}
public void EmptyCollectionShouldWork()
{
    // A freshly constructed map must look empty through every view.
    var map = new LinkedHashMap<int, string>();

    Assert.False(map.Contains(new KeyValuePair<int, string>()));
    Assert.False(map.ContainsKey(0));
    Assert.AreEqual(0, map.Count);

    // Enumerating an empty map must yield nothing at all.
    foreach (var _ in map)
    {
        throw new Exception("There should be no elements.");
    }

    Assert.AreEqual(0, map.Keys.Count);
    Assert.AreEqual(0, map.Values.Count);
}
//-------------------------------------------------------------------------
// Parses the INI file format into an ordered map of section name to an
// ordered multimap of key/value pairs. Duplicate keys within a section
// are retained in order; duplicate section names are rejected.
private static ImmutableMap <string, ImmutableListMultimap <string, string> > parse(ImmutableList <string> lines)
{
    // cannot use ArrayListMultiMap as it does not retain the order of the keys
    // whereas ImmutableListMultimap does retain the order of the keys
    IDictionary <string, ImmutableListMultimap.Builder <string, string> > ini = new LinkedHashMap <string, ImmutableListMultimap.Builder <string, string> >();
    ImmutableListMultimap.Builder <string, string> currentSection = null;
    int lineNum = 0;
    foreach (string line in lines)
    {
        lineNum++;
        line = line.Trim();
        // Skip blank lines and comments introduced by '#' or ';'.
        if (line.Length == 0 || line.StartsWith("#", StringComparison.Ordinal) || line.StartsWith(";", StringComparison.Ordinal))
        {
            continue;
        }
        // "[name]" opens a new section.
        if (line.StartsWith("[", StringComparison.Ordinal) && line.EndsWith("]", StringComparison.Ordinal))
        {
            string sectionName = line.Substring(1, (line.Length - 1) - 1).Trim();
            if (ini.ContainsKey(sectionName))
            {
                throw new System.ArgumentException("Invalid INI file, duplicate section not allowed, line " + lineNum);
            }
            currentSection = ImmutableListMultimap.builder();
            ini[sectionName] = currentSection;
        }
        else if (currentSection == null)
        {
            // A property before any section header is invalid.
            throw new System.ArgumentException("Invalid INI file, properties must be within a [section], line " + lineNum);
        }
        else
        {
            // Prefer " = " as separator (pointing at the '=' itself via +1);
            // otherwise fall back to the first bare '='. No '=' at all means
            // the whole line is a key with an empty value.
            int equalsPosition = line.IndexOf(" = ", StringComparison.Ordinal);
            equalsPosition = equalsPosition < 0 ? line.IndexOf('=') : equalsPosition + 1;
            string key = (equalsPosition < 0 ? line.Trim() : line.Substring(0, equalsPosition).Trim());
            string value = (equalsPosition < 0 ? "" : line.Substring(equalsPosition + 1).Trim());
            if (key.Length == 0)
            {
                throw new System.ArgumentException("Invalid INI file, empty key, line " + lineNum);
            }
            currentSection.put(key, value);
        }
    }
    // Freeze all builders into immutable multimaps, preserving section order.
    return (MapStream.of(ini).mapValues(b => b.build()).toMap());
}
private static void AddNotYetNavigated(int streamNo, int numStreams, LinkedHashMap <int, int[]> substreamsPerStream, NStreamQueryPlanBuilder.BestChainResult bestChain)
{
    // Collect every stream already reachable from streamNo through the
    // existing substream navigation (the query plan per stream).
    ISet<int> navigated = new HashSet<int>();
    navigated.Add(streamNo);
    RecursiveAdd(streamNo, streamNo, substreamsPerStream, navigated, false);

    // Everything reachable already - nothing left to wire up.
    if (navigated.Count == numStreams)
    {
        return;
    }

    // Attach each not-yet-navigated stream of the best chain as a nested
    // join under the last navigated stream that precedes it.
    var anchor = streamNo;
    foreach (var stream in bestChain.Chain)
    {
        if (navigated.Contains(stream))
        {
            anchor = stream;
            continue;
        }

        var existing = substreamsPerStream.Get(anchor) ?? new int[0];
        substreamsPerStream.Put(anchor, CollectionUtil.AddValue(existing, stream));

        // Ensure the newly attached stream has a (possibly empty) entry.
        if (!substreamsPerStream.ContainsKey(stream))
        {
            substreamsPerStream.Put(stream, new int[0]);
        }
        anchor = stream;
    }
}
/// <summary>
/// Removes duplicate properties, using the property name as the uniqueness
/// key. The first occurrence of each name is kept; later ones are removed.
/// </summary>
/// <param name="properties">list of property descriptors; modified in place</param>
public static void RemoveDuplicateProperties(IList<PropertyStem> properties)
{
    // Only name membership is needed, so a set suffices - the original
    // LinkedHashMap's values were never read. Add() returns false for a
    // name already seen, avoiding a separate ContainsKey lookup.
    var seenNames = new HashSet<string>();
    var toRemove = new List<PropertyStem>();

    // Collect duplicates in a separate list to avoid mutating while iterating.
    foreach (var desc in properties)
    {
        if (!seenNames.Add(desc.PropertyName))
        {
            toRemove.Add(desc);
        }
    }

    // Remove the duplicates from the original list.
    foreach (var desc in toRemove)
    {
        properties.Remove(desc);
    }
}
public void AddNamedBean(String beanName, Object bean)
{
    CheckNotDisposed();
    CheckNotRunning();
    ParamChecker.AssertParamNotNull(beanName, "beanName");
    ParamChecker.AssertParamNotNull(bean, "bean");

    // The named-bean registry is created lazily on first use.
    if (nameToServiceDict == null)
    {
        nameToServiceDict = new LinkedHashMap<String, Object>();
    }
    if (nameToServiceDict.ContainsKey(beanName))
    {
        throw CreateDuplicateBeanNameException(beanName, bean, nameToServiceDict.Get(beanName));
    }

    // Reject names containing link/wildcard characters or whitespace.
    var hasForbiddenChar = beanName.Contains("&") || beanName.Contains("*") || beanName.Contains(" ") || beanName.Contains("\t");
    if (hasForbiddenChar)
    {
        throw new ArgumentException("Bean name '" + beanName + "' must not contain any of the following characters: '&', '*' or any whitespace");
    }

    nameToServiceDict.Put(beanName, bean);
}
/// <summary>
/// Reloads the internal SPI list.
/// Changes to the service list are visible after the method ends, all
/// iterators (e.g, from <see cref="AvailableServices"/>,...) stay consistent.
///
/// <para/><b>NOTE:</b> Only new service providers are added, existing ones are
/// never removed or replaced.
///
/// <para/><em>this method is expensive and should only be called for discovery
/// of new service providers on the given classpath/classloader!</em>
/// </summary>
public void Reload()
{
    lock (this)
    {
        // Work on a copy so concurrent readers never observe a partially
        // populated map; it is swapped in as an unmodifiable map at the end.
        IDictionary <string, Type> services = new LinkedHashMap <string, Type>(this.services);
        SPIClassIterator <S> loader = SPIClassIterator <S> .Get();
        foreach (var service in loader)
        {
            string clazzName = service.Name;
            // Derive the lookup name by stripping the first matching
            // class-name suffix and lower-casing the remainder.
            string name = null;
            foreach (string suffix in suffixes)
            {
                if (clazzName.EndsWith(suffix, StringComparison.Ordinal))
                {
                    name = clazzName.Substring(0, clazzName.Length - suffix.Length).ToLowerInvariant();
                    break;
                }
            }
            if (name == null)
            {
                throw new InvalidOperationException("The class name " + service.Name + " has wrong suffix, allowed are: " + Arrays.ToString(suffixes));
            }
            // only add the first one for each name, later services will be ignored
            // this allows to place services before others in classpath to make
            // them used instead of others
            //
            // LUCENETODO: Should we disallow duplicate names here?
            // Allowing it may get confusing on collisions, as different packages
            // could contain same factory class, which is a naming bug!
            // When changing this be careful to allow reload()!
            if (!services.ContainsKey(name))
            {
                services.Add(name, service);
            }
        }
        // Publish the updated registry atomically.
        this.services = Collections.UnmodifiableMap(services);
    }
}
// function WEIGHTED-SAMPLE(bn, e) returns an event and a weight
/**
 * The WEIGHTED-SAMPLE function in Figure 14.15.
 *
 * @param e
 *            observed values for variables E
 * @param bn
 *            a Bayesian network specifying joint distribution
 *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
 * @return return <b>x</b>, w - an event with its associated weight.
 */
public Pair <Map <RandomVariable, Object>, Double> weightedSample(BayesianNetwork bn, AssignmentProposition[] e)
{
    // w <- 1;
    double w = 1.0;
    // <b>x</b> <- an event with n elements initialized from e
    Map <RandomVariable, Object> x = new LinkedHashMap <RandomVariable, Object>();
    foreach (AssignmentProposition ap in e)
    {
        x.Add(ap.getTermVariable(), ap.getValue());
    }
    // foreach variable X<sub>i</sub> in X<sub>1</sub>,...,X<sub>n</sub> do
    // (topological order ensures parents are assigned before their children)
    foreach (RandomVariable Xi in bn.getVariablesInTopologicalOrder())
    {
        // if X<sub>i</sub> is an evidence variable with value x<sub>i</sub>
        // in e
        if (x.ContainsKey(Xi))
        {
            // then w <- w * P(X<sub>i</sub> = x<sub>i</sub> |
            // parents(X<sub>i</sub>))
            w *= bn.getNode(Xi)
                 .getCPD()
                 .getValue(
                     ProbUtil.getEventValuesForXiGivenParents(
                         bn.getNode(Xi), x));
        }
        else
        {
            // else <b>x</b>[i] <- a random sample from
            // <b>P</b>(X<sub>i</sub> | parents(X<sub>i</sub>))
            x.Add(Xi, ProbUtil.randomSample(bn.getNode(Xi), x, randomizer));
        }
    }
    // return <b>x</b>, w
    return (new Pair <Map <RandomVariable, Object>, Double>(x, w));
}
public object EvaluateEnumMethod(EventBean[] eventsLambda, ICollection<object> enumcoll, bool isNewData, ExprEvaluatorContext context)
{
    // Nothing to de-duplicate for zero or one event.
    var events = (ICollection<EventBean>) enumcoll;
    if (events.Count <= 1)
    {
        return events;
    }

    // Keep the first event per key, preserving encounter order.
    IDictionary<IComparable, EventBean> seen = new LinkedHashMap<IComparable, EventBean>();
    foreach (var item in events)
    {
        eventsLambda[_forge.StreamNumLambda] = item;
        var key = (IComparable) _innerExpression.Evaluate(eventsLambda, isNewData, context);
        if (!seen.ContainsKey(key))
        {
            seen.Put(key, item);
        }
    }
    return seen.Values;
}
internal SectionNames(ConfigSnapshot cfg)
{
    // First-seen original casing per lower-cased section name.
    IDictionary<string, string> sectionByFold = new LinkedHashMap<string, string>();
    // Subsection names grouped under each lower-cased section name.
    IDictionary<string, ICollection<string>> subsByFold = new Dictionary<string, ICollection<string>>();

    // Walk this snapshot and every base snapshot it was derived from.
    for (; cfg != null; cfg = cfg.baseState)
    {
        foreach (ConfigLine line in cfg.entryList)
        {
            if (line.section == null)
            {
                continue;
            }
            string fold = StringUtils.ToLowerCase(line.section);
            if (!sectionByFold.ContainsKey(fold))
            {
                sectionByFold.Put(fold, line.section);
            }
            if (line.subsection == null)
            {
                continue;
            }
            ICollection<string> subs = subsByFold.Get(fold);
            if (subs == null)
            {
                subs = new LinkedHashSet<string>();
                subsByFold.Put(fold, subs);
            }
            subs.AddItem(line.subsection);
        }
    }

    sections = new ConfigSnapshot.CaseFoldingSet(sectionByFold);
    subsections = subsByFold;
}
/// <summary> Detect repetition groups. Done once - for first doc. </summary>
private IList <IList <PhrasePositions> > GatherRptGroups(LinkedHashMap <Term, int?> rptTerms)
{
    PhrasePositions[] rpp = RepeatingPPs(rptTerms);
    IList <IList <PhrasePositions> > res = new List <IList <PhrasePositions> >();
    if (!hasMultiTermRpts)
    {
        // simpler - no multi-terms - can base on positions in first doc
        for (int i = 0; i < rpp.Length; i++)
        {
            PhrasePositions pp = rpp[i];
            // already marked as a repetition
            if (pp.rptGroup >= 0)
            {
                continue;
            }
            int tpPos = TpPos(pp);
            for (int j = i + 1; j < rpp.Length; j++)
            {
                PhrasePositions pp2 = rpp[j];
                // not a repetition when: already marked as a repetition, or
                // two PPs are originally in the same offset in the query, or
                // at a different term position
                if (pp2.rptGroup >= 0 || pp2.offset == pp.offset || TpPos(pp2) != tpPos)
                {
                    continue;
                }
                // a repetition
                int g = pp.rptGroup;
                if (g < 0)
                {
                    // open a new group anchored at pp
                    g = res.Count;
                    pp.rptGroup = g;
                    List <PhrasePositions> rl = new List <PhrasePositions>(2);
                    rl.Add(pp);
                    res.Add(rl);
                }
                pp2.rptGroup = g;
                res[g].Add(pp2);
            }
        }
    }
    else
    {
        // more involved - has multi-terms
        List <HashSet <PhrasePositions> > tmp = new List <HashSet <PhrasePositions> >();
        IList <FixedBitSet> bb = PpTermsBitSets(rpp, rptTerms);
        // union the term bit-sets of PPs that share terms, then map each
        // repeating term to its merged group id
        UnionTermGroups(bb);
        IDictionary <Term, int> tg = TermGroups(rptTerms, bb);
        HashSet <int> distinctGroupIDs = new HashSet <int>(tg.Values);
        for (int i = 0; i < distinctGroupIDs.Count; i++)
        {
            tmp.Add(new HashSet <PhrasePositions>());
        }
        // assign every repeating PP to the group of any repeating term it carries
        foreach (PhrasePositions pp in rpp)
        {
            foreach (Term t in pp.terms)
            {
                if (rptTerms.ContainsKey(t))
                {
                    int g = tg[t];
                    tmp[g].Add(pp);
                    // a PP must not belong to two different groups
                    Debug.Assert(pp.rptGroup == -1 || pp.rptGroup == g);
                    pp.rptGroup = g;
                }
            }
        }
        // materialize the group sets into lists
        foreach (HashSet <PhrasePositions> hs in tmp)
        {
            res.Add(new List <PhrasePositions>(hs));
        }
    }
    return (res);
}
/// <summary>
/// Compiles one pattern evaluation node, depth-first: all child nodes are compiled
/// before the node itself (the child-index path is tracked in
/// <paramref name="subexpressionIdStack"/>). Depending on the concrete factory-node
/// type this resolves the filter spec, observer/guard parameters, every-distinct
/// expressions, match-until bounds or followed-by max expressions. Filter nodes may
/// declare tags; those are merged into <paramref name="tags"/> only after the node
/// itself is compiled, so a filter validates against the tags known before it.
/// </summary>
/// <param name="evalNode">root of the pattern subtree to compile</param>
/// <param name="context">statement-level services</param>
/// <param name="evaluatorContext">expression evaluator context</param>
/// <param name="eventTypeReferences">receives the primary names of referenced event types</param>
/// <param name="isInsertInto">true if the statement performs insert-into</param>
/// <param name="tags">tagged (single) and array (repeated) event types collected so far; mutated</param>
/// <param name="subexpressionIdStack">child-index path from the root to the current node</param>
/// <param name="parentNodeStack">stack of ancestor nodes; this node is pushed while its children compile</param>
/// <param name="allTagNamesOrdered">all tag names in declaration order</param>
private static void RecursiveCompile(
    EvalFactoryNode evalNode,
    StatementContext context,
    ExprEvaluatorContext evaluatorContext,
    ICollection<string> eventTypeReferences,
    bool isInsertInto,
    MatchEventSpec tags,
    Deque<int> subexpressionIdStack,
    Stack<EvalFactoryNode> parentNodeStack,
    ICollection<string> allTagNamesOrdered)
{
    // compile children first (bottom-up), recording the child-index path
    var counter = 0;
    parentNodeStack.Push(evalNode);
    foreach (var child in evalNode.ChildNodes)
    {
        subexpressionIdStack.AddLast(counter++);
        RecursiveCompile(
            child,
            context,
            evaluatorContext,
            eventTypeReferences,
            isInsertInto,
            tags,
            subexpressionIdStack,
            parentNodeStack,
            allTagNamesOrdered);
        subexpressionIdStack.RemoveLast();
    }
    parentNodeStack.Pop();

    // tags newly declared by this node; merged into "tags" only at the very end
    LinkedHashMap<string, Pair<EventType, string>> newTaggedEventTypes = null;
    LinkedHashMap<string, Pair<EventType, string>> newArrayEventTypes = null;

    if (evalNode is EvalFilterFactoryNode)
    {
        var filterNode = (EvalFilterFactoryNode) evalNode;
        var eventName = filterNode.RawFilterSpec.EventTypeName;
        if (context.TableService.GetTableMetadata(eventName) != null)
        {
            throw new ExprValidationException("Tables cannot be used in pattern filter atoms");
        }

        var resolvedEventType = FilterStreamSpecRaw.ResolveType(
            context.EngineURI,
            eventName,
            context.EventAdapterService,
            context.PlugInTypeResolutionURIs);
        var finalEventType = resolvedEventType;
        var optionalTag = filterNode.EventAsName;
        var isPropertyEvaluation = false;
        var isParentMatchUntil = IsParentMatchUntil(evalNode, parentNodeStack);

        // obtain property event type, if final event type is properties
        if (filterNode.RawFilterSpec.OptionalPropertyEvalSpec != null)
        {
            var optionalPropertyEvaluator = PropertyEvaluatorFactory.MakeEvaluator(
                context.Container,
                filterNode.RawFilterSpec.OptionalPropertyEvalSpec,
                resolvedEventType,
                filterNode.EventAsName,
                context.EventAdapterService,
                context.EngineImportService,
                context.SchedulingService,
                context.VariableService,
                context.ScriptingService,
                context.TableService,
                context.EngineURI,
                context.StatementId,
                context.StatementName,
                context.Annotations,
                subexpressionIdStack,
                context.ConfigSnapshot,
                context.NamedWindowMgmtService,
                context.StatementExtensionServicesContext);
            finalEventType = optionalPropertyEvaluator.FragmentEventType;
            isPropertyEvaluation = true;
        }

        if (finalEventType is EventTypeSPI)
        {
            eventTypeReferences.Add(((EventTypeSPI) finalEventType).Metadata.PrimaryName);
        }

        // If a tag was supplied for the type, the tags must stay with this type, i.e. a=BeanA -> b=BeanA -> a=BeanB is a no
        if (optionalTag != null)
        {
            var pair = tags.TaggedEventTypes.Get(optionalTag);
            EventType existingType = null;
            if (pair != null)
            {
                existingType = pair.First;
            }
            if (existingType == null)
            {
                // a tag may not be used both as a single tag and as an array (repeat-until) tag
                pair = tags.ArrayEventTypes.Get(optionalTag);
                if (pair != null)
                {
                    throw new ExprValidationException(
                        "Tag '" + optionalTag + "' for event '" + eventName +
                        "' used in the repeat-until operator cannot also appear in other filter expressions");
                }
            }
            if ((existingType != null) && (existingType != finalEventType))
            {
                throw new ExprValidationException(
                    "Tag '" + optionalTag + "' for event '" + eventName +
                    "' has already been declared for events of type " + existingType.UnderlyingType.FullName);
            }
            pair = new Pair<EventType, string>(finalEventType, eventName);

            // add tagged type
            if (isPropertyEvaluation || isParentMatchUntil)
            {
                newArrayEventTypes = new LinkedHashMap<string, Pair<EventType, string>>();
                newArrayEventTypes.Put(optionalTag, pair);
            }
            else
            {
                newTaggedEventTypes = new LinkedHashMap<string, Pair<EventType, string>>();
                newTaggedEventTypes.Put(optionalTag, pair);
            }
        }

        // For this filter, filter types are all known tags at this time,
        // and additionally stream 0 (self) is our event type.
        // Stream type service allows resolution by property name event if that name appears in other tags.
        // by defaulting to stream zero.
        // Stream zero is always the current event type, all others follow the order of the map (stream 1 to N).
        var selfStreamName = optionalTag;
        if (selfStreamName == null)
        {
            selfStreamName = "s_" + UuidGenerator.Generate();
        }
        var filterTypes = new LinkedHashMap<string, Pair<EventType, string>>();
        var typePair = new Pair<EventType, string>(finalEventType, eventName);
        filterTypes.Put(selfStreamName, typePair);
        filterTypes.PutAll(tags.TaggedEventTypes);

        // for the filter, specify all tags used
        var filterTaggedEventTypes = new LinkedHashMap<string, Pair<EventType, string>>(tags.TaggedEventTypes);
        filterTaggedEventTypes.Remove(optionalTag);

        // handle array tags (match-until clause)
        LinkedHashMap<string, Pair<EventType, string>> arrayCompositeEventTypes = null;
        if (tags.ArrayEventTypes != null && !tags.ArrayEventTypes.IsEmpty())
        {
            arrayCompositeEventTypes = new LinkedHashMap<string, Pair<EventType, string>>();
            var patternSubexEventType = GetPatternSubexEventType(
                context.StatementId, "pattern", subexpressionIdStack);
            foreach (var entry in tags.ArrayEventTypes)
            {
                // each array tag gets its own semi-anonymous map type holding only that tag
                var specificArrayType = new LinkedHashMap<string, Pair<EventType, string>>();
                specificArrayType.Put(entry.Key, entry.Value);
                var arrayTagCompositeEventType = context.EventAdapterService.CreateSemiAnonymousMapType(
                    patternSubexEventType,
                    Collections.GetEmptyMap<string, Pair<EventType, string>>(),
                    specificArrayType,
                    isInsertInto);
                context.StatementSemiAnonymousTypeRegistry.Register(arrayTagCompositeEventType);
                var tag = entry.Key;
                if (!filterTypes.ContainsKey(tag))
                {
                    var pair = new Pair<EventType, string>(arrayTagCompositeEventType, tag);
                    filterTypes.Put(tag, pair);
                    arrayCompositeEventTypes.Put(tag, pair);
                }
            }
        }

        StreamTypeService streamTypeService = new StreamTypeServiceImpl(
            filterTypes, context.EngineURI, true, false);
        var exprNodes = filterNode.RawFilterSpec.FilterExpressions;
        var spec = FilterSpecCompiler.MakeFilterSpec(
            resolvedEventType,
            eventName,
            exprNodes,
            filterNode.RawFilterSpec.OptionalPropertyEvalSpec,
            filterTaggedEventTypes,
            arrayCompositeEventTypes,
            streamTypeService,
            null,
            context,
            subexpressionIdStack);
        filterNode.FilterSpec = spec;
    }
    else if (evalNode is EvalObserverFactoryNode)
    {
        var observerNode = (EvalObserverFactoryNode) evalNode;
        try
        {
            // resolve the observer implementation and validate its parameter expressions
            var observerFactory = context.PatternResolutionService.Create(observerNode.PatternObserverSpec);
            var streamTypeService = GetStreamTypeService(
                context.EngineURI,
                context.StatementId,
                context.EventAdapterService,
                tags.TaggedEventTypes,
                tags.ArrayEventTypes,
                subexpressionIdStack,
                "observer",
                context);
            var validationContext = new ExprValidationContext(
                context.Container,
                streamTypeService,
                context.EngineImportService,
                context.StatementExtensionServicesContext,
                null,
                context.SchedulingService,
                context.VariableService,
                context.TableService,
                evaluatorContext,
                context.EventAdapterService,
                context.StatementName,
                context.StatementId,
                context.Annotations,
                context.ContextDescriptor,
                context.ScriptingService,
                false, false, false, false, null, false);
            var validated = ValidateExpressions(
                ExprNodeOrigin.PATTERNOBSERVER,
                observerNode.PatternObserverSpec.ObjectParameters,
                validationContext);
            MatchedEventConvertor convertor = new MatchedEventConvertorImpl(
                tags.TaggedEventTypes,
                tags.ArrayEventTypes,
                allTagNamesOrdered,
                context.EventAdapterService);
            observerNode.ObserverFactory = observerFactory;
            observerFactory.SetObserverParameters(validated, convertor, validationContext);
        }
        catch (ObserverParameterException e)
        {
            throw new ExprValidationException(
                "Invalid parameter for pattern observer '" + observerNode.ToPrecedenceFreeEPL() + "': " + e.Message, e);
        }
        catch (PatternObjectException e)
        {
            throw new ExprValidationException(
                "Failed to resolve pattern observer '" + observerNode.ToPrecedenceFreeEPL() + "': " + e.Message, e);
        }
    }
    else if (evalNode is EvalGuardFactoryNode)
    {
        var guardNode = (EvalGuardFactoryNode) evalNode;
        try
        {
            // resolve the guard implementation and validate its parameter expressions
            var guardFactory = context.PatternResolutionService.Create(guardNode.PatternGuardSpec);
            var streamTypeService = GetStreamTypeService(
                context.EngineURI,
                context.StatementId,
                context.EventAdapterService,
                tags.TaggedEventTypes,
                tags.ArrayEventTypes,
                subexpressionIdStack,
                "guard",
                context);
            var validationContext = new ExprValidationContext(
                context.Container,
                streamTypeService,
                context.EngineImportService,
                context.StatementExtensionServicesContext,
                null,
                context.SchedulingService,
                context.VariableService,
                context.TableService,
                evaluatorContext,
                context.EventAdapterService,
                context.StatementName,
                context.StatementId,
                context.Annotations,
                context.ContextDescriptor,
                context.ScriptingService,
                false, false, false, false, null, false);
            var validated = ValidateExpressions(
                ExprNodeOrigin.PATTERNGUARD,
                guardNode.PatternGuardSpec.ObjectParameters,
                validationContext);
            MatchedEventConvertor convertor = new MatchedEventConvertorImpl(
                tags.TaggedEventTypes,
                tags.ArrayEventTypes,
                allTagNamesOrdered,
                context.EventAdapterService);
            guardNode.GuardFactory = guardFactory;
            guardFactory.SetGuardParameters(validated, convertor);
        }
        catch (GuardParameterException e)
        {
            throw new ExprValidationException(
                "Invalid parameter for pattern guard '" + guardNode.ToPrecedenceFreeEPL() + "': " + e.Message, e);
        }
        catch (PatternObjectException e)
        {
            throw new ExprValidationException(
                "Failed to resolve pattern guard '" + guardNode.ToPrecedenceFreeEPL() + "': " + e.Message, e);
        }
    }
    else if (evalNode is EvalEveryDistinctFactoryNode)
    {
        var distinctNode = (EvalEveryDistinctFactoryNode) evalNode;
        var matchEventFromChildNodes = AnalyzeMatchEvent(distinctNode);
        var streamTypeService = GetStreamTypeService(
            context.EngineURI,
            context.StatementId,
            context.EventAdapterService,
            matchEventFromChildNodes.TaggedEventTypes,
            matchEventFromChildNodes.ArrayEventTypes,
            subexpressionIdStack,
            "every-distinct",
            context);
        var validationContext = new ExprValidationContext(
            context.Container,
            streamTypeService,
            context.EngineImportService,
            context.StatementExtensionServicesContext,
            null,
            context.SchedulingService,
            context.VariableService,
            context.TableService,
            evaluatorContext,
            context.EventAdapterService,
            context.StatementName,
            context.StatementId,
            context.Annotations,
            context.ContextDescriptor,
            context.ScriptingService,
            false, false, false, false, null, false);
        IList<ExprNode> validated;
        try
        {
            validated = ValidateExpressions(
                ExprNodeOrigin.PATTERNEVERYDISTINCT,
                distinctNode.Expressions,
                validationContext);
        }
        catch (ExprValidationPropertyException ex)
        {
            throw new ExprValidationPropertyException(
                ex.Message + ", every-distinct requires that all properties resolve from sub-expressions to the every-distinct",
                ex.InnerException);
        }
        MatchedEventConvertor convertor = new MatchedEventConvertorImpl(
            matchEventFromChildNodes.TaggedEventTypes,
            matchEventFromChildNodes.ArrayEventTypes,
            allTagNamesOrdered,
            context.EventAdapterService);
        distinctNode.Convertor = convertor;

        // Determine whether some expressions are constants or time period
        IList<ExprNode> distinctExpressions = new List<ExprNode>();
        ExprTimePeriodEvalDeltaConst timeDeltaComputation = null;
        ExprNode expiryTimeExp = null;
        var count = -1;
        var last = validated.Count - 1;
        foreach (var expr in validated)
        {
            count++;
            if (count == last && expr is ExprTimePeriod)
            {
                // a trailing time-period expression is the expiry time, not a distinct key
                expiryTimeExp = expr;
                var timePeriodExpr = (ExprTimePeriod) expiryTimeExp;
                timeDeltaComputation = timePeriodExpr.ConstEvaluator(new ExprEvaluatorContextStatement(context, false));
            }
            else if (expr.IsConstantResult)
            {
                if (count == last)
                {
                    // a trailing constant is interpreted as a number-of-seconds expiry
                    var evaluateParams = new EvaluateParams(null, true, evaluatorContext);
                    var value = expr.ExprEvaluator.Evaluate(evaluateParams);
                    if (!(value.IsNumber()))
                    {
                        throw new ExprValidationException(
                            "Invalid parameter for every-distinct, expected number of seconds constant (constant not considered for distinct)");
                    }
                    // NOTE(review): the constant is evaluated a second time here; the
                    // expression is constant-result so the same value is expected
                    var secondsExpire = expr.ExprEvaluator.Evaluate(evaluateParams);
                    long? timeExpire;
                    if (secondsExpire == null)
                    {
                        timeExpire = null;
                    }
                    else
                    {
                        timeExpire = context.TimeAbacus.DeltaForSecondsNumber(secondsExpire);
                    }
                    if (timeExpire != null && timeExpire > 0)
                    {
                        timeDeltaComputation = new ExprTimePeriodEvalDeltaConstGivenDelta(timeExpire.Value);
                        expiryTimeExp = expr;
                    }
                    else
                    {
                        Log.Warn("Invalid seconds-expire " + timeExpire + " for " + ExprNodeUtility.ToExpressionStringMinPrecedenceSafe(expr));
                    }
                }
                else
                {
                    // non-trailing constants are dropped from the distinct-key list with a warning
                    Log.Warn(
                        "Every-distinct node utilizes an expression returning a constant value, please check expression '{0}', not adding expression to distinct-value expression list",
                        expr.ToExpressionStringMinPrecedenceSafe());
                }
            }
            else
            {
                distinctExpressions.Add(expr);
            }
        }
        if (distinctExpressions.IsEmpty())
        {
            throw new ExprValidationException(
                "Every-distinct node requires one or more distinct-value expressions that each return non-constant result values");
        }
        distinctNode.SetDistinctExpressions(distinctExpressions, timeDeltaComputation, expiryTimeExp);
    }
    else if (evalNode is EvalMatchUntilFactoryNode)
    {
        var matchUntilNode = (EvalMatchUntilFactoryNode) evalNode;

        // compile bounds expressions, if any
        var untilMatchEventSpec = new MatchEventSpec(tags.TaggedEventTypes, tags.ArrayEventTypes);
        var streamTypeService = GetStreamTypeService(
            context.EngineURI,
            context.StatementId,
            context.EventAdapterService,
            untilMatchEventSpec.TaggedEventTypes,
            untilMatchEventSpec.ArrayEventTypes,
            subexpressionIdStack,
            "until",
            context);
        var validationContext = new ExprValidationContext(
            context.Container,
            streamTypeService,
            context.EngineImportService,
            context.StatementExtensionServicesContext,
            null,
            context.SchedulingService,
            context.VariableService,
            context.TableService,
            evaluatorContext,
            context.EventAdapterService,
            context.StatementName,
            context.StatementId,
            context.Annotations,
            context.ContextDescriptor,
            context.ScriptingService,
            false, false, false, false, null, false);

        var lower = ValidateBounds(matchUntilNode.LowerBounds, validationContext);
        matchUntilNode.LowerBounds = lower;
        var upper = ValidateBounds(matchUntilNode.UpperBounds, validationContext);
        matchUntilNode.UpperBounds = upper;
        var single = ValidateBounds(matchUntilNode.SingleBound, validationContext);
        matchUntilNode.SingleBound = single;

        var convertor = new MatchedEventConvertorImpl(
            untilMatchEventSpec.TaggedEventTypes,
            untilMatchEventSpec.ArrayEventTypes,
            allTagNamesOrdered,
            context.EventAdapterService);
        matchUntilNode.Convertor = convertor;

        // compile new tag lists
        ISet<string> arrayTags = null;
        var matchUntilAnalysisResult = EvalNodeUtil.RecursiveAnalyzeChildNodes(matchUntilNode.ChildNodes[0]);
        foreach (var filterNode in matchUntilAnalysisResult.FilterNodes)
        {
            var optionalTag = filterNode.EventAsName;
            if (optionalTag != null)
            {
                if (arrayTags == null)
                {
                    arrayTags = new HashSet<string>();
                }
                arrayTags.Add(optionalTag);
            }
        }

        // tags declared inside the repeated sub-expression move from single to array tags
        if (arrayTags != null)
        {
            foreach (var arrayTag in arrayTags)
            {
                if (!tags.ArrayEventTypes.ContainsKey(arrayTag))
                {
                    tags.ArrayEventTypes.Put(arrayTag, tags.TaggedEventTypes.Get(arrayTag));
                    tags.TaggedEventTypes.Remove(arrayTag);
                }
            }
        }
        matchUntilNode.TagsArrayed = GetIndexesForTags(allTagNamesOrdered, arrayTags);
    }
    else if (evalNode is EvalFollowedByFactoryNode)
    {
        var followedByNode = (EvalFollowedByFactoryNode) evalNode;
        StreamTypeService streamTypeService = new StreamTypeServiceImpl(context.EngineURI, false);
        var validationContext = new ExprValidationContext(
            context.Container,
            streamTypeService,
            context.EngineImportService,
            context.StatementExtensionServicesContext,
            null,
            context.SchedulingService,
            context.VariableService,
            context.TableService,
            evaluatorContext,
            context.EventAdapterService,
            context.StatementName,
            context.StatementId,
            context.Annotations,
            context.ContextDescriptor,
            context.ScriptingService,
            false, false, false, false, null, false);

        if (followedByNode.OptionalMaxExpressions != null)
        {
            IList<ExprNode> validated = new List<ExprNode>();
            foreach (var maxExpr in followedByNode.OptionalMaxExpressions)
            {
                if (maxExpr == null)
                {
                    // no maximum configured for this position
                    validated.Add(null);
                }
                else
                {
                    var visitor = new ExprNodeSummaryVisitor();
                    maxExpr.Accept(visitor);
                    if (!visitor.IsPlain)
                    {
                        var errorMessage = "Invalid maximum expression in followed-by, " + visitor.GetMessage() + " are not allowed within the expression";
                        Log.Error(errorMessage);
                        throw new ExprValidationException(errorMessage);
                    }
                    var validatedExpr = ExprNodeUtility.GetValidatedSubtree(
                        ExprNodeOrigin.FOLLOWEDBYMAX, maxExpr, validationContext);
                    validated.Add(validatedExpr);
                    if ((validatedExpr.ExprEvaluator.ReturnType == null) || (!validatedExpr.ExprEvaluator.ReturnType.IsNumeric()))
                    {
                        var message = "Invalid maximum expression in followed-by, the expression must return an integer value";
                        throw new ExprValidationException(message);
                    }
                }
            }
            followedByNode.OptionalMaxExpressions = validated;
        }
    }

    // merge tags declared by this node into the shared tag maps
    if (newTaggedEventTypes != null)
    {
        tags.TaggedEventTypes.PutAll(newTaggedEventTypes);
    }
    if (newArrayEventTypes != null)
    {
        tags.ArrayEventTypes.PutAll(newArrayEventTypes);
    }
}
/// <summary>
/// Creates a template for the given output-model object and recursively
/// populates it from every field marked with [ModelElement]. Nested model
/// objects, dictionaries of model objects and enumerables of model objects
/// are each walked and attached to the matching formal argument of the
/// template; the model object itself is passed as the first argument.
/// </summary>
/// <param name="omo">output-model object to render</param>
/// <param name="header">if true, use the "&lt;name&gt;Header" template variant</param>
/// <returns>the populated template, or a placeholder template on error</returns>
public virtual Template Walk(OutputModelObject omo, bool header)
{
    // Resolve the template name from the model object's class name.
    Type modelClass = omo.GetType();
    string templateName = modelClass.Name;
    if (templateName == null)
    {
        tool.errMgr.ToolError(ErrorType.NO_MODEL_TO_TEMPLATE_MAPPING, modelClass.Name);
        return new Template("[" + templateName + " invalid]");
    }

    if (header)
        templateName += "Header";

    Template template = templates.GetInstanceOf(templateName);
    if (template == null)
    {
        tool.errMgr.ToolError(ErrorType.CODE_GEN_TEMPLATES_INCOMPLETE, templateName);
        return new Template("[" + templateName + " invalid]");
    }

    if (template.impl.FormalArguments == null)
    {
        tool.errMgr.ToolError(ErrorType.CODE_TEMPLATE_ARG_ISSUE, templateName, "<none>");
        return template;
    }

    // Index the template's formal arguments by name for quick membership checks.
    IDictionary<string, FormalArgument> argsByName = new LinkedHashMap<string, FormalArgument>();
    foreach (FormalArgument arg in template.impl.FormalArguments)
        argsByName[arg.Name] = arg;

    // The output-model object is always bound to the first formal argument.
    string modelArgName = template.impl.FormalArguments[0].Name;
    template.Add(modelArgName, omo);

    // Walk every [ModelElement] field and attach its rendered value(s).
    ISet<string> seenFieldNames = new HashSet<string>();
    foreach (FieldInfo field in GetFields(modelClass))
    {
        if (field.GetCustomAttribute<ModelElementAttribute>() == null)
        {
            continue;
        }

        string fieldName = field.Name;
        if (!seenFieldNames.Add(fieldName))
        {
            tool.errMgr.ToolError(ErrorType.INTERNAL_ERROR,
                "Model object " + omo.GetType().Name + " has multiple fields named '" + fieldName + "'");
            continue;
        }

        // Just don't set [ModelElement] fields w/o formal argument in target ST
        if (!argsByName.ContainsKey(fieldName))
        {
            continue;
        }

        try
        {
            object value = field.GetValue(omo);
            if (value is OutputModelObject)
            {
                // single nested model object
                template.Add(fieldName, Walk((OutputModelObject)value, header));
            }
            else if (value is IDictionary)
            {
                // dictionary of model objects: walk each value, preserve order
                IDictionary<object, Template> walked = new LinkedHashMap<object, Template>();
                foreach (DictionaryEntry entry in (IDictionary)value)
                {
                    walked[entry.Key] = Walk((OutputModelObject)entry.Value, header);
                }
                template.Add(fieldName, walked);
            }
            else if (value is IEnumerable && !(value is string))
            {
                // sequence of model objects: add each walked template, skipping nulls
                foreach (object element in (IEnumerable)value)
                {
                    if (element == null)
                    {
                        continue;
                    }
                    template.Add(fieldName, Walk((OutputModelObject)element, header));
                }
            }
            else if (value != null)
            {
                tool.errMgr.ToolError(ErrorType.INTERNAL_ERROR,
                    "not recognized nested model element: " + fieldName);
            }
        }
        catch (FieldAccessException)
        {
            tool.errMgr.ToolError(ErrorType.CODE_TEMPLATE_ARG_ISSUE, templateName, fieldName);
        }
    }

    return template;
}
/// <summary>
/// Builds the stream type service used to validate a match-recognize DEFINE item.
/// Stream positions come from <paramref name="variableStreams"/>: the defined
/// variable and all visible single-value variables resolve to the parent view's
/// event type at their assigned stream number, while visible multi-match (array)
/// variables are folded into one anonymous object-array type placed in the extra
/// last stream position.
/// </summary>
/// <param name="statementContext">statement services (event adapter service, engine URI)</param>
/// <param name="variableStreams">per-variable stream number (First) and multi-match flag (Second)</param>
/// <param name="defineItem">the DEFINE item being validated</param>
/// <param name="visibilityByIdentifier">identifiers visible to each variable</param>
/// <param name="parentViewType">event type of the stream underlying the match-recognize</param>
/// <returns>stream type service for validating the define-item expression</returns>
/// <exception cref="ExprValidationException">if the defined variable does not occur in the pattern</exception>
public static StreamTypeService BuildDefineStreamTypeServiceDefine(
    StatementContext statementContext,
    LinkedHashMap<String, Pair<int, bool>> variableStreams,
    MatchRecognizeDefineItem defineItem,
    IDictionary<String, ISet<String>> visibilityByIdentifier,
    EventType parentViewType)
{
    if (!variableStreams.ContainsKey(defineItem.Identifier))
    {
        throw new ExprValidationException("Variable '" + defineItem.Identifier + "' does not occur in pattern");
    }

    // one slot per variable plus a trailing slot reserved for the multimatch type
    var streamNamesDefine = new String[variableStreams.Count + 1];
    var typesDefine = new EventType[variableStreams.Count + 1];
    var isIStreamOnly = new bool[variableStreams.Count + 1];
    CompatExtensions.Fill(isIStreamOnly, true);

    // the defined variable resolves to the parent view type at its own stream number
    var streamNumDefine = variableStreams.Get(defineItem.Identifier).First;
    streamNamesDefine[streamNumDefine] = defineItem.Identifier;
    typesDefine[streamNumDefine] = parentViewType;

    // add visible single-value
    var visibles = visibilityByIdentifier.Get(defineItem.Identifier);
    var hasVisibleMultimatch = false;
    if (visibles != null)
    {
        foreach (var visible in visibles)
        {
            var def = variableStreams.Get(visible);
            if (!def.Second)
            {
                streamNamesDefine[def.First] = visible;
                typesDefine[def.First] = parentViewType;
            }
            else
            {
                hasVisibleMultimatch = true;
            }
        }
    }

    // compile multi-matching event type (in last position), if any are used
    if (hasVisibleMultimatch)
    {
        IDictionary<String, Object> multievent = new LinkedHashMap<String, Object>();
        foreach (var entry in variableStreams)
        {
            var identifier = entry.Key;
            if (entry.Value.Second)
            {
                // "visibles" is non-null here: hasVisibleMultimatch can only be set inside the loop above
                if (visibles.Contains(identifier))
                {
                    multievent.Put(
                        identifier,
                        new EventType[] { parentViewType });
                }
                else
                {
                    // placeholder property for multimatch variables not visible to this item
                    multievent.Put("esper_matchrecog_internal", null);
                }
            }
        }

        var multimatch = statementContext.EventAdapterService.CreateAnonymousObjectArrayType(
            "esper_matchrecog_internal", multievent);
        typesDefine[typesDefine.Length - 1] = multimatch;
        streamNamesDefine[streamNamesDefine.Length - 1] = multimatch.Name;
    }

    return (new StreamTypeServiceImpl(
        typesDefine,
        streamNamesDefine,
        isIStreamOnly,
        statementContext.EngineURI,
        false));
}
// function WEIGHTED-SAMPLE(bn, e) returns an event and a weight
/**
 * Implements the WEIGHTED-SAMPLE function in Figure 14.15.
 *
 * @param bn
 *            a Bayesian network specifying joint distribution
 *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
 * @param e
 *            observed values for variables E
 * @return the pair <b>x</b>, w - an event together with its associated
 *         likelihood weight.
 */
public Pair<Map<RandomVariable, Object>, Double> weightedSample(
        BayesianNetwork bn, AssignmentProposition[] e)
{
    // w <- 1; start with a neutral weight
    double weight = 1.0;

    // <b>x</b> <- an event with n elements, seeded from the evidence e
    Map<RandomVariable, Object> sample = new LinkedHashMap<RandomVariable, Object>();
    foreach (AssignmentProposition evidence in e)
    {
        sample.Add(evidence.getTermVariable(), evidence.getValue());
    }

    // foreach variable X<sub>i</sub> in X<sub>1</sub>,...,X<sub>n</sub> do
    foreach (RandomVariable variable in bn.getVariablesInTopologicalOrder())
    {
        if (sample.ContainsKey(variable))
        {
            // evidence variable: scale the weight by
            // P(X<sub>i</sub> = x<sub>i</sub> | parents(X<sub>i</sub>))
            weight *= bn.getNode(variable)
                    .getCPD()
                    .getValue(
                            ProbUtil.getEventValuesForXiGivenParents(
                                    bn.getNode(variable), sample));
        }
        else
        {
            // non-evidence variable: <b>x</b>[i] <- a random sample from
            // <b>P</b>(X<sub>i</sub> | parents(X<sub>i</sub>))
            sample.Add(variable, ProbUtil.randomSample(bn.getNode(variable), sample, randomizer));
        }
    }

    // return <b>x</b>, w
    return new Pair<Map<RandomVariable, Object>, Double>(sample, weight);
}
/// <summary>
/// Recursively compiles one node of a pattern expression tree, depth-first: all child
/// nodes are compiled before the node itself. Depending on the concrete node type this
/// resolves filter event types and builds the filter spec, wires pattern observers and
/// guards, validates every-distinct expressions, validates match-until bounds, and
/// validates followed-by max expressions. Tag-to-event-type mappings discovered at this
/// node are merged into <paramref name="tags" /> only after the node has been processed,
/// so the node's own filter does not see its own tag as a prior stream.
/// </summary>
/// <param name="evalNode">pattern node to compile; its children are compiled first</param>
/// <param name="eventTypeReferences">threaded through the recursion; not consumed directly in this method</param>
/// <param name="isInsertInto">threaded through the recursion; not consumed directly in this method</param>
/// <param name="tags">tagged (single-event) and arrayed (match-until) event types collected so far; mutated in place</param>
/// <param name="parentNodeStack">stack of ancestors of the node currently being compiled; this node is pushed for the duration of the child recursion</param>
/// <param name="allTagNamesOrdered">all tag names in their defined order, used to build event convertors and tag indexes</param>
/// <param name="streamNum">stream number of the pattern within the statement</param>
/// <param name="statementRawInfo">statement-level raw information (module name etc.)</param>
/// <param name="services">statement compile-time services (type resolution, registries, pattern object resolution)</param>
private static void RecursiveCompile(
    EvalForgeNode evalNode,
    ISet<string> eventTypeReferences,
    bool isInsertInto,
    MatchEventSpec tags,
    Stack<EvalForgeNode> parentNodeStack,
    ISet<string> allTagNamesOrdered,
    int streamNum,
    StatementRawInfo statementRawInfo,
    StatementCompileTimeServices services)
{
    // depth-first: compile children while this node is on the parent stack
    parentNodeStack.Push(evalNode);
    foreach (var child in evalNode.ChildNodes)
    {
        RecursiveCompile(
            child,
            eventTypeReferences,
            isInsertInto,
            tags,
            parentNodeStack,
            allTagNamesOrdered,
            streamNum,
            statementRawInfo,
            services);
    }

    parentNodeStack.Pop();

    // tag mappings discovered at this node; merged into 'tags' at the very end of the method
    IDictionary<string, Pair<EventType, string>> newTaggedEventTypes = null;
    IDictionary<string, Pair<EventType, string>> newArrayEventTypes = null;

    if (evalNode is EvalFilterForgeNode)
    {
        var filterNode = (EvalFilterForgeNode) evalNode;
        var eventName = filterNode.RawFilterSpec.EventTypeName;
        if (services.TableCompileTimeResolver.Resolve(eventName) != null)
        {
            throw new ExprValidationException("Tables cannot be used in pattern filter atoms");
        }

        var resolvedEventType = ResolveTypeName(eventName, services.EventTypeCompileTimeResolver);
        var finalEventType = resolvedEventType;
        var optionalTag = filterNode.EventAsName;
        var isPropertyEvaluation = false;
        var isParentMatchUntil = IsParentMatchUntil(evalNode, parentNodeStack);

        // obtain property event type, if final event type is properties
        if (filterNode.RawFilterSpec.OptionalPropertyEvalSpec != null)
        {
            var optionalPropertyEvaluator = PropertyEvaluatorForgeFactory.MakeEvaluator(
                filterNode.RawFilterSpec.OptionalPropertyEvalSpec,
                resolvedEventType,
                filterNode.EventAsName,
                statementRawInfo,
                services);
            finalEventType = optionalPropertyEvaluator.FragmentEventType;
            isPropertyEvaluation = true;
        }

        // If a tag was supplied for the type, the tags must stay with this type, i.e.
        // a=BeanA -> b=BeanA -> a=BeanB is a no
        if (optionalTag != null)
        {
            var pair = tags.TaggedEventTypes.Get(optionalTag);
            EventType existingType = null;
            if (pair != null)
            {
                existingType = pair.First;
            }

            // a tag already bound as an array (match-until) tag may not also be a plain filter tag
            if (existingType == null)
            {
                pair = tags.ArrayEventTypes.Get(optionalTag);
                if (pair != null)
                {
                    throw new ExprValidationException(
                        "Tag '" +
                        optionalTag +
                        "' for event '" +
                        eventName +
                        "' used in the repeat-until operator cannot also appear in other filter expressions");
                }
            }

            if (existingType != null && existingType != finalEventType)
            {
                throw new ExprValidationException(
                    "Tag '" +
                    optionalTag +
                    "' for event '" +
                    eventName +
                    "' has already been declared for events of type " +
                    existingType.UnderlyingType.Name);
            }

            pair = new Pair<EventType, string>(finalEventType, eventName);

            // add tagged type
            // tags inside a property evaluation or under match-until accumulate arrays of events
            if (isPropertyEvaluation || isParentMatchUntil)
            {
                newArrayEventTypes = new LinkedHashMap<string, Pair<EventType, string>>();
                newArrayEventTypes.Put(optionalTag, pair);
            }
            else
            {
                newTaggedEventTypes = new LinkedHashMap<string, Pair<EventType, string>>();
                newTaggedEventTypes.Put(optionalTag, pair);
            }
        }

        // For this filter, filter types are all known tags at this time,
        // and additionally stream 0 (self) is our event type.
        // Stream type service allows resolution by property name event if that name appears in other tags.
        // by defaulting to stream zero.
        // Stream zero is always the current event type, all others follow the order of the map (stream 1 to N).
        var selfStreamName = optionalTag;
        if (selfStreamName == null)
        {
            // untagged filter: synthesize a unique stream name for stream zero
            selfStreamName = "s_" + UuidGenerator.Generate();
        }

        var filterTypes = new LinkedHashMap<string, Pair<EventType, string>>();
        var typePair = new Pair<EventType, string>(finalEventType, eventName);
        filterTypes.Put(selfStreamName, typePair);
        filterTypes.PutAll(tags.TaggedEventTypes);

        // for the filter, specify all tags used
        var filterTaggedEventTypes = new LinkedHashMap<string, Pair<EventType, string>>(tags.TaggedEventTypes);
        filterTaggedEventTypes.Remove(optionalTag);

        // handle array tags (match-until clause)
        IDictionary<string, Pair<EventType, string>> arrayCompositeEventTypes = null;
        if (tags.ArrayEventTypes != null && !tags.ArrayEventTypes.IsEmpty())
        {
            arrayCompositeEventTypes = new LinkedHashMap<string, Pair<EventType, string>>();

            foreach (var entry in tags.ArrayEventTypes)
            {
                // for each array tag, register an anonymous map type wrapping that single tag
                var specificArrayType = new LinkedHashMap<string, Pair<EventType, string>>();
                specificArrayType.Put(entry.Key, entry.Value);

                var eventTypeName = services.EventTypeNameGeneratorStatement.GetAnonymousPatternNameWTag(
                    streamNum,
                    evalNode.FactoryNodeId,
                    entry.Key);
                var mapProps = GetMapProperties(
                    Collections.GetEmptyMap<string, Pair<EventType, string>>(),
                    specificArrayType);
                var metadata = new EventTypeMetadata(
                    eventTypeName,
                    statementRawInfo.ModuleName,
                    EventTypeTypeClass.PATTERNDERIVED,
                    EventTypeApplicationType.MAP,
                    NameAccessModifier.TRANSIENT,
                    EventTypeBusModifier.NONBUS,
                    false,
                    EventTypeIdPair.Unassigned());
                var mapEventType = BaseNestableEventUtil.MakeMapTypeCompileTime(
                    metadata,
                    mapProps,
                    null,
                    null,
                    null,
                    null,
                    services.BeanEventTypeFactoryPrivate,
                    services.EventTypeCompileTimeResolver);
                services.EventTypeCompileTimeRegistry.NewType(mapEventType);

                var tag = entry.Key;
                if (!filterTypes.ContainsKey(tag))
                {
                    var pair = new Pair<EventType, string>(mapEventType, tag);
                    filterTypes.Put(tag, pair);
                    arrayCompositeEventTypes.Put(tag, pair);
                }
            }
        }

        StreamTypeService streamTypeService = new StreamTypeServiceImpl(
            filterTypes,
            true,
            false);
        var exprNodes = filterNode.RawFilterSpec.FilterExpressions;

        var spec = FilterSpecCompiler.MakeFilterSpec(
            resolvedEventType,
            eventName,
            exprNodes,
            filterNode.RawFilterSpec.OptionalPropertyEvalSpec,
            filterTaggedEventTypes,
            arrayCompositeEventTypes,
            streamTypeService,
            null,
            statementRawInfo,
            services);
        filterNode.FilterSpec = spec;
    }
    else if (evalNode is EvalObserverForgeNode)
    {
        var observerNode = (EvalObserverForgeNode) evalNode;
        try
        {
            // resolve the observer implementation, then validate its parameter expressions
            var observerForge = services.PatternResolutionService.Create(observerNode.PatternObserverSpec);

            var streamTypeService = GetStreamTypeService(
                tags.TaggedEventTypes,
                tags.ArrayEventTypes,
                observerNode,
                streamNum,
                statementRawInfo,
                services);
            var validationContext = new ExprValidationContextBuilder(
                streamTypeService,
                statementRawInfo,
                services).Build();
            var validated = ValidateExpressions(
                ExprNodeOrigin.PATTERNOBSERVER,
                observerNode.PatternObserverSpec.ObjectParameters,
                validationContext);

            var convertor = new MatchedEventConvertorForge(
                tags.TaggedEventTypes,
                tags.ArrayEventTypes,
                allTagNamesOrdered);

            observerNode.ObserverFactory = observerForge;
            observerForge.SetObserverParameters(validated, convertor, validationContext);
        }
        catch (ObserverParameterException e)
        {
            throw new ExprValidationException(
                "Invalid parameter for pattern observer '" +
                observerNode.ToPrecedenceFreeEPL() +
                "': " +
                e.Message,
                e);
        }
        catch (PatternObjectException e)
        {
            throw new ExprValidationException(
                "Failed to resolve pattern observer '" +
                observerNode.ToPrecedenceFreeEPL() +
                "': " +
                e.Message,
                e);
        }
    }
    else if (evalNode is EvalGuardForgeNode)
    {
        var guardNode = (EvalGuardForgeNode) evalNode;
        try
        {
            // resolve the guard implementation, then validate its parameter expressions
            var guardForge = services.PatternResolutionService.Create(guardNode.PatternGuardSpec);

            var streamTypeService = GetStreamTypeService(
                tags.TaggedEventTypes,
                tags.ArrayEventTypes,
                guardNode,
                streamNum,
                statementRawInfo,
                services);
            var validationContext = new ExprValidationContextBuilder(
                streamTypeService,
                statementRawInfo,
                services).Build();
            var validated = ValidateExpressions(
                ExprNodeOrigin.PATTERNGUARD,
                guardNode.PatternGuardSpec.ObjectParameters,
                validationContext);

            var convertor = new MatchedEventConvertorForge(
                tags.TaggedEventTypes,
                tags.ArrayEventTypes,
                allTagNamesOrdered);

            guardNode.GuardForge = guardForge;
            guardForge.SetGuardParameters(validated, convertor, services);
        }
        catch (GuardParameterException e)
        {
            throw new ExprValidationException(
                "Invalid parameter for pattern guard '" +
                guardNode.ToPrecedenceFreeEPL() +
                "': " +
                e.Message,
                e);
        }
        catch (PatternObjectException e)
        {
            throw new ExprValidationException(
                "Failed to resolve pattern guard '" +
                guardNode.ToPrecedenceFreeEPL() +
                "': " +
                e.Message,
                e);
        }
    }
    else if (evalNode is EvalEveryDistinctForgeNode)
    {
        var distinctNode = (EvalEveryDistinctForgeNode) evalNode;
        // tags are taken from the child sub-tree, not the accumulated statement-wide tags
        var matchEventFromChildNodes = AnalyzeMatchEvent(distinctNode);
        var streamTypeService = GetStreamTypeService(
            matchEventFromChildNodes.TaggedEventTypes,
            matchEventFromChildNodes.ArrayEventTypes,
            distinctNode,
            streamNum,
            statementRawInfo,
            services);
        var validationContext =
            new ExprValidationContextBuilder(streamTypeService, statementRawInfo, services).Build();
        IList<ExprNode> validated;
        try
        {
            validated = ValidateExpressions(
                ExprNodeOrigin.PATTERNEVERYDISTINCT,
                distinctNode.Expressions,
                validationContext);
        }
        catch (ExprValidationPropertyException ex)
        {
            throw new ExprValidationPropertyException(
                ex.Message +
                ", every-distinct requires that all properties resolve from sub-expressions to the every-distinct",
                ex.InnerException);
        }

        var convertor = new MatchedEventConvertorForge(
            matchEventFromChildNodes.TaggedEventTypes,
            matchEventFromChildNodes.ArrayEventTypes,
            allTagNamesOrdered);

        distinctNode.Convertor = convertor;

        // Determine whether some expressions are constants or time period
        IList<ExprNode> distinctExpressions = new List<ExprNode>();
        TimePeriodComputeForge timePeriodComputeForge = null;
        ExprNode expiryTimeExp = null;
        var count = -1;
        var last = validated.Count - 1;
        foreach (var expr in validated)
        {
            count++;
            if (count == last && expr is ExprTimePeriod)
            {
                // trailing time-period parameter becomes the expiry time
                expiryTimeExp = expr;
                var timePeriodExpr = (ExprTimePeriod) expiryTimeExp;
                timePeriodComputeForge = timePeriodExpr.TimePeriodComputeForge;
            }
            else if (expr.Forge.ForgeConstantType.IsCompileTimeConstant)
            {
                if (count == last)
                {
                    // trailing numeric constant is interpreted as an expiry in seconds
                    var value = expr.Forge.ExprEvaluator.Evaluate(null, true, null);
                    if (!value.IsNumber())
                    {
                        throw new ExprValidationException(
                            "Invalid parameter for every-distinct, expected number of seconds constant (constant not considered for distinct)");
                    }

                    var secondsExpire = expr.Forge.ExprEvaluator.Evaluate(null, true, null);
                    var timeExpire = secondsExpire == null
                        ? (long?) null
                        : (long?) services.ImportServiceCompileTime.TimeAbacus.DeltaForSecondsNumber(
                            secondsExpire);

                    if (timeExpire != null && timeExpire > 0)
                    {
                        timePeriodComputeForge = new TimePeriodComputeConstGivenDeltaForge(timeExpire.Value);
                        expiryTimeExp = expr;
                    }
                    else
                    {
                        Log.Warn(
                            "Invalid seconds-expire " +
                            timeExpire +
                            " for " +
                            ExprNodeUtilityPrint.ToExpressionStringMinPrecedenceSafe(expr));
                    }
                }
                else
                {
                    // non-trailing constants cannot contribute to distinctness; warn and drop
                    Log.Warn(
                        "Every-distinct node utilizes an expression returning a constant value, please check expression '" +
                        ExprNodeUtilityPrint.ToExpressionStringMinPrecedenceSafe(expr) +
                        "', not adding expression to distinct-value expression list");
                }
            }
            else
            {
                distinctExpressions.Add(expr);
            }
        }

        if (distinctExpressions.IsEmpty())
        {
            throw new ExprValidationException(
                "Every-distinct node requires one or more distinct-value expressions that each return non-constant result values");
        }

        distinctNode.SetDistinctExpressions(distinctExpressions, timePeriodComputeForge, expiryTimeExp);
    }
    else if (evalNode is EvalMatchUntilForgeNode)
    {
        var matchUntilNode = (EvalMatchUntilForgeNode) evalNode;

        // compile bounds expressions, if any
        var untilMatchEventSpec = new MatchEventSpec(tags.TaggedEventTypes, tags.ArrayEventTypes);
        var streamTypeService = GetStreamTypeService(
            untilMatchEventSpec.TaggedEventTypes,
            untilMatchEventSpec.ArrayEventTypes,
            matchUntilNode,
            streamNum,
            statementRawInfo,
            services);
        var validationContext =
            new ExprValidationContextBuilder(streamTypeService, statementRawInfo, services).Build();

        var lower = ValidateBounds(matchUntilNode.LowerBounds, validationContext);
        matchUntilNode.LowerBounds = lower;

        var upper = ValidateBounds(matchUntilNode.UpperBounds, validationContext);
        matchUntilNode.UpperBounds = upper;

        var single = ValidateBounds(matchUntilNode.SingleBound, validationContext);
        matchUntilNode.SingleBound = single;

        bool tightlyBound;
        if (matchUntilNode.SingleBound != null)
        {
            // a single bound acts as both lower and upper bound
            ValidateMatchUntil(
                matchUntilNode.SingleBound,
                matchUntilNode.SingleBound,
                false);
            tightlyBound = true;
        }
        else
        {
            var allowZeroLowerBounds =
                matchUntilNode.LowerBounds != null && matchUntilNode.UpperBounds != null;
            tightlyBound = ValidateMatchUntil(
                matchUntilNode.LowerBounds,
                matchUntilNode.UpperBounds,
                allowZeroLowerBounds);
        }

        // without tight bounds an until-expression (second child) is required
        if (matchUntilNode.SingleBound == null && !tightlyBound && matchUntilNode.ChildNodes.Count < 2)
        {
            throw new ExprValidationException("Variable bounds repeat operator requires an until-expression");
        }

        var convertor = new MatchedEventConvertorForge(
            untilMatchEventSpec.TaggedEventTypes,
            untilMatchEventSpec.ArrayEventTypes,
            allTagNamesOrdered);

        matchUntilNode.Convertor = convertor;

        // compile new tag lists
        ISet<string> arrayTags = null;
        var matchUntilAnalysisResult = EvalNodeUtil.RecursiveAnalyzeChildNodes(matchUntilNode.ChildNodes[0]);
        foreach (var filterNode in matchUntilAnalysisResult.FilterNodes)
        {
            var optionalTag = filterNode.EventAsName;
            if (optionalTag != null)
            {
                if (arrayTags == null)
                {
                    arrayTags = new HashSet<string>();
                }

                arrayTags.Add(optionalTag);
            }
        }

        if (arrayTags != null)
        {
            // tags matched repeatedly under match-until become array tags; migrate them
            foreach (var arrayTag in arrayTags)
            {
                if (!tags.ArrayEventTypes.ContainsKey(arrayTag))
                {
                    tags.ArrayEventTypes.Put(arrayTag, tags.TaggedEventTypes.Get(arrayTag));
                    tags.TaggedEventTypes.Remove(arrayTag);
                }
            }
        }

        matchUntilNode.TagsArrayedSet = GetIndexesForTags(allTagNamesOrdered, arrayTags);
    }
    else if (evalNode is EvalFollowedByForgeNode)
    {
        var followedByNode = (EvalFollowedByForgeNode) evalNode;
        StreamTypeService streamTypeService = new StreamTypeServiceImpl(false);
        var validationContext =
            new ExprValidationContextBuilder(streamTypeService, statementRawInfo, services).Build();

        if (followedByNode.OptionalMaxExpressions != null)
        {
            IList<ExprNode> validated = new List<ExprNode>();
            foreach (var maxExpr in followedByNode.OptionalMaxExpressions)
            {
                if (maxExpr == null)
                {
                    // positions without a max-expression are kept as null placeholders
                    validated.Add(null);
                }
                else
                {
                    // max expressions must be "plain": no subselects, aggregations etc.
                    var visitor = new ExprNodeSummaryVisitor();
                    maxExpr.Accept(visitor);
                    if (!visitor.IsPlain)
                    {
                        var errorMessage = "Invalid maximum expression in followed-by, " +
                                           visitor.Message +
                                           " are not allowed within the expression";
                        Log.Error(errorMessage);
                        throw new ExprValidationException(errorMessage);
                    }

                    var validatedExpr = ExprNodeUtilityValidate.GetValidatedSubtree(
                        ExprNodeOrigin.FOLLOWEDBYMAX,
                        maxExpr,
                        validationContext);
                    validated.Add(validatedExpr);
                    var returnType = validatedExpr.Forge.EvaluationType;
                    if (returnType == null || !returnType.IsNumeric())
                    {
                        var message =
                            "Invalid maximum expression in followed-by, the expression must return an integer value";
                        throw new ExprValidationException(message);
                    }
                }
            }

            followedByNode.OptionalMaxExpressions = validated;
        }
    }

    // merge tags discovered at this node into the statement-wide spec (deferred so that
    // this node's own filter compilation above did not see its own tag)
    if (newTaggedEventTypes != null)
    {
        tags.TaggedEventTypes.PutAll(newTaggedEventTypes);
    }

    if (newArrayEventTypes != null)
    {
        tags.ArrayEventTypes.PutAll(newArrayEventTypes);
    }
}