/// <summary>
/// Returns the applicable gap-move actions for the given 8-puzzle state,
/// in the fixed order UP, DOWN, LEFT, RIGHT (preserved by the LinkedHashSet).
/// </summary>
public HashSet<Action> actions(Object state)
{
    var board = (EightPuzzleBoard)state;
    HashSet<Action> applicable = new LinkedHashSet<Action>();
    if (board.canMoveGap(EightPuzzleBoard.UP))
    {
        applicable.Add(EightPuzzleBoard.UP);
    }
    if (board.canMoveGap(EightPuzzleBoard.DOWN))
    {
        applicable.Add(EightPuzzleBoard.DOWN);
    }
    if (board.canMoveGap(EightPuzzleBoard.LEFT))
    {
        applicable.Add(EightPuzzleBoard.LEFT);
    }
    if (board.canMoveGap(EightPuzzleBoard.RIGHT))
    {
        applicable.Add(EightPuzzleBoard.RIGHT);
    }
    return applicable;
}
/// <summary>
/// UnionWith must append previously unseen elements at the end,
/// keeping the original insertion order for existing elements.
/// </summary>
public void ShouldPreserveOrderingOnUnion()
{
    var subject = new LinkedHashSet<int>();
    subject.Add(1);
    subject.Add(10);
    subject.Add(5);

    subject.UnionWith(new[] { 10, 30, 15 });

    Assert.That(subject, Has.Count.EqualTo(5));
    Assert.That(subject.ToArray(), Is.EqualTo(new[] { 1, 10, 5, 30, 15 }));
}
/// <summary>
/// ExceptWith must remove the given elements while preserving the
/// insertion order of the survivors.
/// </summary>
public void ShouldPreserveOrderingOnExcept()
{
    var subject = new LinkedHashSet<int>();
    subject.Add(1);
    subject.Add(10);
    subject.Add(5);
    subject.Add(7);
    subject.Add(8);
    subject.Add(9);

    subject.ExceptWith(new[] { 7, 10, 9, 18 });

    Assert.That(subject, Has.Count.EqualTo(3));
    Assert.That(subject.ToArray(), Is.EqualTo(new[] { 1, 5, 8 }));
}
/// <summary>
/// Enumeration must follow insertion order, not the natural ordering.
/// </summary>
public void CanIterateInInsertionOrder()
{
    // Deliberately add in an order different from the natural ordering.
    var subject = new LinkedHashSet<int> { 1, 10, 5 };

    Assert.That(subject, Has.Count.EqualTo(3));
    Assert.That(subject.ToArray(), Is.EqualTo(new[] { 1, 10, 5 }));
}
/// <summary>
/// Re-adding an existing element must return false and leave both the
/// count and the iteration order unchanged.
/// </summary>
public void ReinsertShouldNotAffectOrdering()
{
    // Deliberately add in an order different from the natural ordering.
    var subject = new LinkedHashSet<int> { 1, 10, 5 };

    // This element should still be first in the list.
    bool wasAdded = subject.Add(1);

    Assert.That(wasAdded, Is.False);
    Assert.That(subject, Has.Count.EqualTo(3));
    Assert.That(subject.ToArray(), Is.EqualTo(new[] { 1, 10, 5 }));
}
/// <summary>
/// A set built from a collection must contain exactly that collection's
/// elements.
/// </summary>
public void TestConstructorCollection()
{
    LinkedHashSet<Object> hs2 = new LinkedHashSet<Object>(Arrays.AsList(objArray));
    for (int counter = 0; counter < objArray.Length; counter++)
    {
        // BUG FIX: previously asserted against the fixture set 'hs' instead of
        // the set under test 'hs2', so the collection constructor was never
        // actually verified.
        Assert.IsTrue(hs2.Contains(objArray[counter]), "LinkedHashSet does not contain correct elements");
    }
    Assert.IsTrue(hs2.Size() == objArray.Length, "LinkedHashSet created from collection incorrect size");
}
/// <summary>
/// Returns one MoveToAction per location directly linked (on the map) to the
/// location named by the given state.
/// </summary>
public HashSet<Action> actions(System.Object state)
{
    HashSet<Action> result = new LinkedHashSet<Action>();
    System.String location = state.ToString();
    List<System.String> linkedLocations = map.getLocationsLinkedTo(location);
    foreach (System.String linkLoc in linkedLocations)
    {
        result.Add(new MoveToAction(linkLoc));
    }
    return result;
}
/// <summary>
/// Capacity constructor: produces an empty set for a valid capacity and
/// throws ArgumentException for a negative one.
/// </summary>
public void TestConstructorI()
{
    var sized = new LinkedHashSet<Object>(5);
    Assert.AreEqual(0, sized.Size(), "Created incorrect LinkedHashSet");

    try
    {
        new LinkedHashSet<Object>(-1);
    }
    catch (ArgumentException)
    {
        return;
    }
    Assert.Fail("Failed to throw ArgumentException for capacity < 0");
}
/// <summary>
/// Capacity/load-factor constructor: produces an empty set for valid
/// arguments and throws ArgumentException for a load factor of zero.
/// </summary>
public void TestConstructorIF()
{
    var sized = new LinkedHashSet<Object>(5, 0.5f);
    Assert.AreEqual(0, sized.Size(), "Created incorrect LinkedHashSet");

    try
    {
        new LinkedHashSet<Object>(0, 0);
    }
    catch (ArgumentException)
    {
        return;
    }
    Assert.Fail("Failed to throw ArgumentException for initial load factor <= 0");
}
/// <summary>
/// Incremental-formulation NQueens: returns a PLACE_QUEEN action for every
/// square in the next free column that is not under attack.
/// </summary>
public HashSet<Action> actions(Object state)
{
    NQueensBoard board = (NQueensBoard)state;
    HashSet<Action> placements = new LinkedHashSet<Action>();
    int nextColumn = board.getNumberOfQueensOnBoard();
    int dimension = board.getSize();
    for (int row = 0; row < dimension; row++)
    {
        XYLocation candidate = new XYLocation(nextColumn, row);
        if (!board.isSquareUnderAttack(candidate))
        {
            placements.Add(new QueenAction(QueenAction.PLACE_QUEEN, candidate));
        }
    }
    return placements;
}
/* TODO: looks like the following are function delegates, replace
public ModelBasedReflexVacuumAgent() {
    super(new ModelBasedReflexAgentProgram() {
        protected override void init() {
            setState(new DynamicState());
            setRules(getRuleSet());
        }

        protected DynamicState updateState(DynamicState state, Action anAction, Percept percept, Model model) {
            VacuumEnvPercept vep = (VacuumEnvPercept) percept;
            state.setAttribute(ATTRIBUTE_CURRENT_LOCATION, vep.getAgentLocation());
            state.setAttribute(ATTRIBUTE_CURRENT_STATE, vep.getLocationState());
            // Keep track of the state of the different locations
            if (VacuumEnvironment.LOCATION_A == vep.getAgentLocation()) {
                state.setAttribute(ATTRIBUTE_STATE_LOCATION_A, vep.getLocationState());
            } else {
                state.setAttribute(ATTRIBUTE_STATE_LOCATION_B, vep.getLocationState());
            }
            return state;
        }
    });
* */

//
// PRIVATE METHODS
//

/// <summary>
/// Builds the condition-action rules for the model-based reflex vacuum
/// agent. Rule order encodes precedence: do nothing when both locations
/// are known clean, suck when the current square is dirty, otherwise move
/// to the other location.
/// </summary>
private static HashSet<Rule> getRuleSet()
{
    // Note: Using a LinkedHashSet so that the iteration order (i.e. implied
    // precedence) of rules can be guaranteed.
    HashSet<Rule> rules = new LinkedHashSet<Rule>();

    // 1. Both locations known to be clean -> nothing left to do.
    rules.Add(new Rule(new ANDCondition(
        new EQUALCondition(ATTRIBUTE_STATE_LOCATION_A, VacuumEnvironment.LocationState.Clean),
        new EQUALCondition(ATTRIBUTE_STATE_LOCATION_B, VacuumEnvironment.LocationState.Clean)),
        NoOpAction.NO_OP));

    // 2. Current square dirty -> suck.
    rules.Add(new Rule(new EQUALCondition(ATTRIBUTE_CURRENT_STATE,
        VacuumEnvironment.LocationState.Dirty), VacuumEnvironment.ACTION_SUCK));

    // 3./4. Otherwise move toward the other location.
    rules.Add(new Rule(new EQUALCondition(ATTRIBUTE_CURRENT_LOCATION,
        VacuumEnvironment.LOCATION_A), VacuumEnvironment.ACTION_MOVE_RIGHT));
    rules.Add(new Rule(new EQUALCondition(ATTRIBUTE_CURRENT_LOCATION,
        VacuumEnvironment.LOCATION_B), VacuumEnvironment.ACTION_MOVE_LEFT));

    return rules;
}
//
// PRIVATE METHODS
//

/// <summary>
/// Builds the condition-action rules for the simple reflex vacuum agent.
/// Rule order encodes precedence (suck beats moving).
/// </summary>
private static HashSet<Rule> getRuleSet()
{
    // Note: Using a LinkedHashSet so that the iteration order (i.e. implied
    // precedence) of rules can be guaranteed.
    HashSet<Rule> ruleSet = new LinkedHashSet<Rule>();

    // Rules based on REFLEX-VACUUM-AGENT:
    // Artificial Intelligence A Modern Approach (3rd Edition): Figure 2.8,
    // page 48.
    var suckWhenDirty = new Rule(
        new EQUALCondition(VacuumEnvPercept.ATTRIBUTE_STATE, VacuumEnvironment.LocationState.Dirty),
        VacuumEnvironment.ACTION_SUCK);
    var moveRightFromA = new Rule(
        new EQUALCondition(VacuumEnvPercept.ATTRIBUTE_AGENT_LOCATION, VacuumEnvironment.LOCATION_A),
        VacuumEnvironment.ACTION_MOVE_RIGHT);
    var moveLeftFromB = new Rule(
        new EQUALCondition(VacuumEnvPercept.ATTRIBUTE_AGENT_LOCATION, VacuumEnvironment.LOCATION_B),
        VacuumEnvironment.ACTION_MOVE_LEFT);

    ruleSet.Add(suckWhenDirty);
    ruleSet.Add(moveRightFromA);
    ruleSet.Add(moveLeftFromB);
    return ruleSet;
}
/// <summary>
/// Convert the input into a pcore molecule.
/// </summary>
/// <param name="input">the compound being converted from</param>
/// <returns>pcore molecule</returns>
/// <exception cref="CDKException">match failed</exception>
private IAtomContainer GetPharmacophoreMolecule(IAtomContainer input)
{
    // XXX: prepare query, to be moved
    PrepareInput(input);

    var pharmacophoreMolecule = input.Builder.NewAtomContainer();

    // Symbols whose SMARTS have already been matched (avoids re-matching).
    var matched = new HashSet<string>();
    // LinkedHashSet keeps the pcore atoms unique while preserving match order.
    var uniqueAtoms = new LinkedHashSet<PharmacophoreAtom>();

    Debug.WriteLine($"Converting [{input.Title}] to a pcore molecule");

    // lets loop over each pcore query atom
    foreach (var atom in pharmacophoreQuery.Atoms)
    {
        var qatom = (PharmacophoreQueryAtom)atom;
        var smarts = qatom.Smarts;

        // a pcore query might have multiple instances of a given pcore atom (say
        // 2 hydrophobic groups separated by X unit). In such a case we want to find
        // the atoms matching the pgroup SMARTS just once, rather than redoing the
        // matching for each instance of the pcore query atom.
        if (!matched.Add(qatom.Symbol))
        {
            continue;
        }

        // see if the smarts for this pcore query atom gets any matches
        // in our query molecule. If so, then collect each set of
        // matching atoms and for each set make a new pcore atom and
        // add it to the pcore atom container object
        int count = 0;
        foreach (var query in qatom.CompiledSmarts)
        {
            // create the lazy mappings iterator
            var mappings = query.MatchAll(input).GetUniqueAtoms();

            foreach (var mapping in mappings)
            {
                uniqueAtoms.Add(NewPCoreAtom(input, qatom, smarts, mapping));
                count++;
            }
        }
        Debug.WriteLine($"\tFound {count} unique matches for {smarts}");
    }
    pharmacophoreMolecule.SetAtoms(uniqueAtoms.ToArray());

    // now that we have added all the pcore atoms to the container
    // we need to join all atoms with pcore bonds (i.e. distance constraints)
    if (HasDistanceConstraints(pharmacophoreQuery))
    {
        var npatom = pharmacophoreMolecule.Atoms.Count;
        // All-pairs distance bonds between the collected pcore atoms.
        for (int i = 0; i < npatom - 1; i++)
        {
            for (int j = i + 1; j < npatom; j++)
            {
                var atom1 = PharmacophoreAtom.Get(pharmacophoreMolecule.Atoms[i]);
                var atom2 = PharmacophoreAtom.Get(pharmacophoreMolecule.Atoms[j]);
                var bond = new PharmacophoreBond(atom1, atom2);
                pharmacophoreMolecule.Bonds.Add(bond);
            }
        }
    }

    // if we have angle constraints, generate only the valid
    // possible angle relationships, rather than all possible
    if (HasAngleConstraints(pharmacophoreQuery))
    {
        int nangleDefs = 0;

        foreach (var bond in pharmacophoreQuery.Bonds)
        {
            if (!(bond is PharmacophoreQueryAngleBond))
            {
                continue;
            }

            var startQAtom = bond.Atoms[0];
            var middleQAtom = bond.Atoms[1];
            var endQAtom = bond.Atoms[2];

            // make a list of the patoms in the target that match
            // each type of angle atom
            var startl = new List<IAtom>();
            var middlel = new List<IAtom>();
            var endl = new List<IAtom>();
            foreach (var tatom in pharmacophoreMolecule.Atoms)
            {
                if (tatom.Symbol.Equals(startQAtom.Symbol, StringComparison.Ordinal))
                {
                    startl.Add(tatom);
                }
                if (tatom.Symbol.Equals(middleQAtom.Symbol, StringComparison.Ordinal))
                {
                    middlel.Add(tatom);
                }
                if (tatom.Symbol.Equals(endQAtom.Symbol, StringComparison.Ordinal))
                {
                    endl.Add(tatom);
                }
            }

            // now we form the relevant angles, but we will
            // have reversed repeats
            var tmpl = new List<IAtom[]>();
            foreach (var middle in middlel)
            {
                foreach (var start in startl)
                {
                    if (middle.Equals(start))
                    {
                        continue;
                    }
                    foreach (var end in endl)
                    {
                        if (start.Equals(end) || middle.Equals(end))
                        {
                            continue;
                        }
                        tmpl.Add(new IAtom[] { start, middle, end });
                    }
                }
            }

            // now clean up reversed repeats
            var unique = new List<IAtom[]>();
            for (int i = 0; i < tmpl.Count; i++)
            {
                var seq1 = tmpl[i];
                bool isRepeat = false;
                for (int j = 0; j < unique.Count; j++)
                {
                    // NOTE(review): this compares an index into tmpl with an
                    // index into unique, which are not parallel lists —
                    // presumably a self-comparison guard; worth confirming.
                    if (i == j)
                    {
                        continue;
                    }
                    var seq2 = unique[j];
                    // A reversed repeat shares the middle atom and has
                    // start/end swapped.
                    if (Compares.AreDeepEqual(seq1[1], seq2[1]) && Compares.AreDeepEqual(seq1[0], seq2[2]) && Compares.AreDeepEqual(seq1[2], seq2[0]))
                    {
                        isRepeat = true;
                    }
                }
                if (!isRepeat)
                {
                    unique.Add(seq1);
                }
            }

            // finally we can add the unique angle to the target
            foreach (var seq in unique)
            {
                var pbond = new PharmacophoreAngleBond(PharmacophoreAtom.Get(seq[0]), PharmacophoreAtom.Get(seq[1]), PharmacophoreAtom.Get(seq[2]));
                pharmacophoreMolecule.Bonds.Add(pbond);
                nangleDefs++;
            }
        }
        Debug.WriteLine($"Added {nangleDefs} defs to the target pcore molecule");
    }
    return (pharmacophoreMolecule);
}
/// <summary>
/// For each triggering event, looks up index matches and filters them
/// through the join expression; returns the distinct matching events in
/// first-match order, or null when nothing matched.
/// NOTE: _eventsPerStream is a shared scratch array — slot 0 holds the
/// candidate indexed event, slot 1 the triggering event.
/// </summary>
public EventBean[] Lookup(EventBean[] newData, ExprEvaluatorContext exprEvaluatorContext)
{
    if (InstrumentationHelper.ENABLED)
    {
        InstrumentationHelper.Get().QInfraTriggeredLookup(SubordWMatchExprLookupStrategyType.INDEXED_FILTERED);
    }

    // Lazily allocated; stays null when no event passes the join expression.
    ISet<EventBean> foundEvents = null;
    var evaluateParams = new EvaluateParams(_eventsPerStream, true, exprEvaluatorContext);

    // For every new event (usually 1)
    foreach (EventBean newEvent in newData)
    {
        _eventsPerStream[1] = newEvent;

        // use index to find match
        var matches = _tableLookupStrategy.Lookup(_eventsPerStream, exprEvaluatorContext);
        if ((matches == null) || (matches.IsEmpty()))
        {
            continue;
        }

        // evaluate expression
        var eventsIt = matches.GetEnumerator();
        while (eventsIt.MoveNext())
        {
            _eventsPerStream[0] = eventsIt.Current;

            foreach (EventBean aNewData in newData)
            {
                // Stream 1 events are the originating events (on-delete events)
                _eventsPerStream[1] = aNewData;
                var result = (bool?)_joinExpr.Evaluate(evaluateParams);
                if (result != null)
                {
                    if (result.Value)
                    {
                        if (foundEvents == null)
                        {
                            // LinkedHashSet de-duplicates while keeping
                            // first-match order.
                            foundEvents = new LinkedHashSet<EventBean>();
                        }
                        foundEvents.Add(_eventsPerStream[0]);
                    }
                }
            }
        }
    }

    if (foundEvents == null)
    {
        if (InstrumentationHelper.ENABLED)
        {
            InstrumentationHelper.Get().AInfraTriggeredLookup(null);
        }
        return (null);
    }

    EventBean[] events = foundEvents.ToArray();
    if (InstrumentationHelper.ENABLED)
    {
        InstrumentationHelper.Get().AInfraTriggeredLookup(events);
    }
    return (events);
}
/// <summary>
/// Computes the joint distribution over the supplied propositions.
/// NOTE(review): the table-iteration that should accumulate probabilities
/// into 'ud' is still commented out (unported Java anonymous class), so for
/// propositions with unbound variables the returned table is not yet
/// populated — see the TODO below.
/// </summary>
public CategoricalDistribution jointDistribution(params IProposition[] propositions)
{
    ProbabilityTable d = null;
    IProposition conjProp = ProbUtil.constructConjunction(propositions);
    // Unbound variables of the conjunction, in stable (insertion) order.
    LinkedHashSet<RandomVariable> vars = new LinkedHashSet<RandomVariable>(conjProp.getUnboundScope());

    if (vars.Count > 0)
    {
        RandomVariable[] distVars = new RandomVariable[vars.Count];
        vars.CopyTo(distVars);

        ProbabilityTable ud = new ProbabilityTable(distVars);
        Object[] values = new Object[vars.Count];

        //ProbabilityTable.Iterator di = new ProbabilityTable.Iterator() {
        //    public void iterate(Map<RandomVariable, Object> possibleWorld,
        //            double probability) {
        //        if (conjProp.holds(possibleWorld)) {
        //            int i = 0;
        //            for (RandomVariable rv : vars) {
        //                values[i] = possibleWorld.get(rv);
        //                i++;
        //            }
        //            int dIdx = ud.getIndex(values);
        //            ud.setValue(dIdx, ud.getValues()[dIdx] + probability);
        //        }
        //    }
        //};
        //distribution.iterateOverTable(di);
        // TODO:

        d = ud;
    }
    else
    {
        // No Unbound Variables, therefore just return
        // the singular probability related to the proposition.
        d = new ProbabilityTable();
        d.setValue(0, prior(propositions));
    }
    return d;
}
/// <summary>
/// Validates a context start/end condition and, where needed, rewrites it
/// (filter -> pattern when prior matches exist). Returns the condition
/// paired with the tagged-event spec and the set of tags it contributes.
/// </summary>
private ContextDetailMatchPair ValidateRewriteContextCondition(
    EPServicesContext servicesContext,
    StatementContext statementContext,
    ContextDetailCondition endpoint,
    ISet<string> eventTypesReferenced,
    MatchEventSpec priorMatches,
    ISet<string> priorAllTags)
{
    // Crontab endpoint: compile the crontab expressions into a schedule.
    if (endpoint is ContextDetailConditionCrontab)
    {
        var crontab = (ContextDetailConditionCrontab)endpoint;
        var schedule = ExprNodeUtility.ToCrontabSchedule(ExprNodeOrigin.CONTEXTCONDITION, crontab.Crontab, statementContext, false);
        crontab.Schedule = schedule;
        return (new ContextDetailMatchPair(crontab, new MatchEventSpec(), new LinkedHashSet<string>()));
    }

    // Time-period endpoint: validate the expression; constant negative
    // periods are rejected outright.
    if (endpoint is ContextDetailConditionTimePeriod)
    {
        var timePeriod = (ContextDetailConditionTimePeriod)endpoint;
        var validationContext = new ExprValidationContext(
            new StreamTypeServiceImpl(servicesContext.EngineURI, false),
            statementContext.MethodResolutionService, null,
            statementContext.SchedulingService,
            statementContext.VariableService,
            statementContext.TableService,
            GetDefaultAgentInstanceContext(statementContext),
            statementContext.EventAdapterService,
            statementContext.StatementName,
            statementContext.StatementId,
            statementContext.Annotations,
            statementContext.ContextDescriptor,
            statementContext.ScriptingService,
            false, false, false, false, null, false);
        ExprNodeUtility.GetValidatedSubtree(ExprNodeOrigin.CONTEXTCONDITION, timePeriod.TimePeriod, validationContext);
        if (timePeriod.TimePeriod.IsConstantResult)
        {
            if (timePeriod.TimePeriod.EvaluateAsSeconds(null, true, null) < 0)
            {
                throw new ExprValidationException("Invalid negative time period expression '" + ExprNodeUtility.ToExpressionStringMinPrecedenceSafe(timePeriod.TimePeriod) + "'");
            }
        }
        return (new ContextDetailMatchPair(timePeriod, new MatchEventSpec(), new LinkedHashSet<string>()));
    }

    // Pattern endpoint: delegate to pattern validation; it reports the tags.
    if (endpoint is ContextDetailConditionPattern)
    {
        var pattern = (ContextDetailConditionPattern)endpoint;
        var matches = ValidatePatternContextConditionPattern(statementContext, pattern, eventTypesReferenced, priorMatches, priorAllTags);
        return (new ContextDetailMatchPair(pattern, matches.First, matches.Second));
    }

    if (endpoint is ContextDetailConditionFilter)
    {
        var filter = (ContextDetailConditionFilter)endpoint;
        ValidateNotTable(servicesContext, filter.FilterSpecRaw.EventTypeName);

        // compile as filter if there are no prior match to consider
        if (priorMatches == null || (priorMatches.ArrayEventTypes.IsEmpty() && priorMatches.TaggedEventTypes.IsEmpty()))
        {
            var rawExpr = new FilterStreamSpecRaw(filter.FilterSpecRaw, ViewSpec.EMPTY_VIEWSPEC_ARRAY, null, new StreamSpecOptions());
            var compiled = (FilterStreamSpecCompiled)rawExpr.Compile(statementContext, eventTypesReferenced, false, Collections.GetEmptyList<int>(), false, true, false, filter.OptionalFilterAsName);
            filter.FilterSpecCompiled = compiled.FilterSpec;
            var matchEventSpec = new MatchEventSpec();
            var filterForType = compiled.FilterSpec.FilterForEventType;
            var allTags = new LinkedHashSet<string>();
            if (filter.OptionalFilterAsName != null)
            {
                matchEventSpec.TaggedEventTypes.Put(filter.OptionalFilterAsName, new Pair<EventType, string>(filterForType, rawExpr.RawFilterSpec.EventTypeName));
                allTags.Add(filter.OptionalFilterAsName);
            }
            return (new ContextDetailMatchPair(filter, matchEventSpec, allTags));
        }

        // compile as pattern if there are prior matches to consider, since this is a type of followed-by relationship
        var factoryNode = servicesContext.PatternNodeFactory.MakeFilterNode(filter.FilterSpecRaw, filter.OptionalFilterAsName, 0);
        var pattern = new ContextDetailConditionPattern(factoryNode, true, false);
        var matches = ValidatePatternContextConditionPattern(statementContext, pattern, eventTypesReferenced, priorMatches, priorAllTags);
        return (new ContextDetailMatchPair(pattern, matches.First, matches.Second));
    }
    else if (endpoint is ContextDetailConditionImmediate)
    {
        // Immediate endpoint carries no matches or tags.
        return (new ContextDetailMatchPair(endpoint, new MatchEventSpec(), new LinkedHashSet<string>()));
    }
    else
    {
        throw new IllegalStateException("Unrecognized endpoint type " + endpoint);
    }
}
/// <summary>
/// Processes a transaction bundle atomically against the block: either every
/// bundle transaction is applied, or world state, receipts and the in-block
/// transaction set are rolled back to their pre-bundle snapshots.
/// </summary>
/// <returns>the action of the last processed transaction (Skip when the bundle was reverted)</returns>
private TxAction ProcessBundle(Block block,
    List<BundleTransaction> bundleTransactions,
    LinkedHashSet<Transaction> transactionsInBlock,
    BlockReceiptsTracer receiptsTracer,
    ProcessingOptions processingOptions)
{
    // Snapshots taken up-front so the whole bundle can be reverted on failure.
    Snapshot snapshot = _worldState.TakeSnapshot();
    int receiptSnapshot = receiptsTracer.TakeSnapshot();
    UInt256 initialBalance = _stateProvider.GetBalance(block.Header.GasBeneficiary!);

    // Rejects bundles whose realized gas price fell below the price implied
    // by the simulation (i.e. the simulated bundle fee was manipulated).
    bool CheckFeeNotManipulated()
    {
        UInt256 finalBalance = _stateProvider.GetBalance(block.Header.GasBeneficiary!);
        UInt256 feeReceived = finalBalance - initialBalance;
        UInt256 originalSimulatedGasPrice = bundleTransactions[0].SimulatedBundleFee / bundleTransactions[0].SimulatedBundleGasUsed;
        UInt256 actualGasPrice = feeReceived / (UInt256)receiptsTracer.LastReceipt.GasUsed!;
        return (actualGasPrice >= originalSimulatedGasPrice);
    }

    bool bundleSucceeded = bundleTransactions.Count > 0;
    TxAction txAction = TxAction.Skip;
    for (int index = 0; index < bundleTransactions.Count && bundleSucceeded; index++)
    {
        txAction = ProcessBundleTransaction(block, bundleTransactions[index], index, receiptsTracer, processingOptions, transactionsInBlock);
        bundleSucceeded &= txAction == TxAction.Add;

        // if we need to stop on not first tx in the bundle, we actually want to skip the bundle
        txAction = txAction == TxAction.Stop && index != 0 ? TxAction.Skip : txAction;
    }

    if (bundleSucceeded)
    {
        bundleSucceeded &= CheckFeeNotManipulated();
    }

    if (bundleSucceeded)
    {
        // Commit: record every bundle tx and raise processed events with
        // receipt indexes relative to the receipt snapshot.
        for (int index = 0; index < bundleTransactions.Count; index++)
        {
            BundleTransaction bundleTransaction = bundleTransactions[index];
            transactionsInBlock.Add(bundleTransaction);
            int txIndex = receiptSnapshot + index;
            _transactionProcessed?.Invoke(this, new TxProcessedEventArgs(txIndex, bundleTransaction, receiptsTracer.TxReceipts[txIndex]));
        }
    }
    else
    {
        // Revert: restore state/receipts and drop any partially added txs.
        _worldState.Restore(snapshot);
        receiptsTracer.Restore(receiptSnapshot);
        for (int index = 0; index < bundleTransactions.Count; index++)
        {
            transactionsInBlock.Remove(bundleTransactions[index]);
        }
    }

    bundleTransactions.Clear();
    return (txAction);
}
/// <summary>
/// Contains must report true for a present element, false for a foreign
/// object, and must accept null.
/// </summary>
public void TestContainsObject()
{
    Assert.IsTrue(hs.Contains(objArray[90]), "Returned false for valid object");
    Assert.IsTrue(!hs.Contains(new Object()), "Returned true for invalid Object");

    var nullHolder = new LinkedHashSet<Object>();
    nullHolder.Add(null);
    Assert.IsTrue(nullHolder.Contains(null), "Cannot handle null");
}
/// <summary>A freshly constructed set must be empty.</summary>
public void TestConstructor()
{
    var freshSet = new LinkedHashSet<Object>();
    Assert.AreEqual(0, freshSet.Size(), "Created incorrect LinkedHashSet");
}
/// <summary>
/// Remove must delete the element, shrink the set by one, and accept null.
/// </summary>
public void test_RemoveLjava_lang_Object()
{
    int sizeBefore = hs.Size();
    hs.Remove((Object)98);
    Assert.IsTrue(!hs.Contains(98), "Failed to Remove element");
    Assert.IsTrue(hs.Size() == sizeBefore - 1, "Failed to decrement set size");

    var nullHolder = new LinkedHashSet<Object>();
    nullHolder.Add(null);
    Assert.IsTrue(nullHolder.Remove(null), "Cannot handle null");
}
/// <summary>
/// Complete-state NQueens formulation: returns a MOVE_QUEEN action for
/// every square on the board not currently occupied by a queen.
/// </summary>
public HashSet<Action> actions(Object state)
{
    HashSet<Action> moves = new LinkedHashSet<Action>();
    NQueensBoard board = (NQueensBoard)state;
    for (int col = 0; col < board.getSize(); col++)
    {
        for (int row = 0; row < board.getSize(); row++)
        {
            XYLocation square = new XYLocation(col, row);
            if (!board.queenExistsAt(square))
            {
                moves.Add(new QueenAction(QueenAction.MOVE_QUEEN, square));
            }
        }
    }
    return moves;
}
/// <summary>
/// Converts a fixed-length balanced-ternary trit array into its byte
/// representation (multi-limb integer, emitted most-significant first).
/// </summary>
/// <param name="trits">source trits; must be exactly TritsLength long</param>
/// <param name="bytes">destination buffer; must be exactly ByteLength long</param>
/// <exception cref="ArgumentException">either array has the wrong size</exception>
public static void FromTritsToBytes(int[] trits, byte[] bytes)
{
    if (trits.Length != TritsLength)
    {
        throw new ArgumentException("trits array has invalid size");
    }
    if (bytes.Length != ByteLength)
    {
        throw new ArgumentException("bytes array has invalid size");
    }

    // Multi-limb accumulator (little-endian limb order).
    var baseHalf3 = new int[IntLength];

    // Collect the distinct trit values to detect the all-(-1) special case.
    var setUniqueNumbers = new LinkedHashSet<int>();
    foreach (var x in trits)
    {
        setUniqueNumbers.Add(x);
    }

    if (setUniqueNumbers.Count == 1 && setUniqueNumbers.Contains(-1))
    {
        // All trits are -1: result is the negation of HALF_3 (~HALF_3 + 1).
        baseHalf3 = (int[])Half3.Clone();
        BigIntNot(baseHalf3);
        BigIntAdd(baseHalf3, 1);
    }
    else
    {
        var size = IntLength;
        // Horner evaluation in base 3, most-significant trit first.
        // NOTE(review): `i-- > 0` starting at TritsLength - 1 never reads
        // trits[TritsLength - 1] — presumably intentional (top trit ignored
        // by this conversion), but worth confirming against the spec.
        for (var i = TritsLength - 1; i-- > 0;)
        {
            {
                // Multiply by radix
                var sz = size;
                var carry = 0;
                for (var j = 0; j < sz; j++)
                {
                    // full_mul
                    var v = ToUnsignedLong(baseHalf3[j]) * 3 + ToUnsignedLong(carry);
                    carry = (int)((v >> (sizeof(int) * 8)) & 0xFFFFFFFF);
                    baseHalf3[j] = (int)(v & 0xFFFFFFFF);
                }
                if (carry > 0)
                {
                    baseHalf3[sz] = carry;
                    size += 1;
                }
            }

            // Shift the trit from [-1, 1] to [0, 2] before accumulating.
            var inValue = trits[i] + 1;
            {
                // Add
                var sz = BigIntAdd(baseHalf3, inValue);
                if (sz > size)
                {
                    size = sz;
                }
            }
        }

        // Re-center the unsigned accumulator by subtracting HALF_3.
        if (Sum(baseHalf3) != 0)
        {
            if (BigIntCmp(Half3, baseHalf3) <= 0)
            {
                // base is >= HALF_3.
                // just do base - HALF_3
                baseHalf3 = BigIntSub(baseHalf3, Half3);
            }
            else
            {
                // we don't have a wrapping sub.
                // so we need to be clever.
                baseHalf3 = BigIntSub(Half3, baseHalf3);
                BigIntNot(baseHalf3);
                BigIntAdd(baseHalf3, 1);
            }
        }
    }

    // output: emit limbs most-significant first, each as 4 big-endian bytes.
    for (var i = 0; i < IntLength; i++)
    {
        bytes[i * 4 + 0] = (byte)((baseHalf3[IntLength - 1 - i] & 0xFF000000) >> 24);
        bytes[i * 4 + 1] = (byte)((baseHalf3[IntLength - 1 - i] & 0x00FF0000) >> 16);
        bytes[i * 4 + 2] = (byte)((baseHalf3[IntLength - 1 - i] & 0x0000FF00) >> 8);
        bytes[i * 4 + 3] = (byte)((baseHalf3[IntLength - 1 - i] & 0x000000FF) >> 0);
    }
}
/// <summary>
/// Serializing the set via BinaryFormatter must not throw.
/// NOTE(review): BinaryFormatter is obsolete and removed in .NET 9; this
/// test only guards legacy serialization support.
/// </summary>
public void DoesNotThrowWhenTryToSerializeWithBinaryFormatter()
{
    var subject = new LinkedHashSet<int> { 1, 10, 5 };
    var binaryFormatter = new BinaryFormatter();
    using (var buffer = new MemoryStream())
    {
        Assert.DoesNotThrow(() => binaryFormatter.Serialize(buffer, subject));
    }
}
/// <summary>
/// SymmetricExceptWith must drop the common elements and append the
/// other collection's new elements at the end, preserving order.
/// </summary>
public void ShouldPreserveOrderingOnSymmetricExcept()
{
    var subject = new LinkedHashSet<int>();
    subject.Add(1);
    subject.Add(10);
    subject.Add(5);

    subject.SymmetricExceptWith(new[] { 1, 10, 3, 9 });

    Assert.That(subject, Has.Count.EqualTo(3));
    Assert.That(subject.ToArray(), Is.EqualTo(new[] { 5, 3, 9 }));
}
/// <summary>
/// Populates the fixture set with objArray's elements (in order) plus null.
/// </summary>
protected void SetUp()
{
    hs = new LinkedHashSet<Object>();
    foreach (Object element in objArray)
    {
        hs.Add(element);
    }
    hs.Add(null);
}
/// <summary>
/// Injects data-flow operator parameters into 'top' in three passes:
/// (1) explicit property values via catch-all setter methods, writable
/// property setters or DataFlowOpParameter-annotated fields; (2) optional
/// "operator/property" URI-keyed values; (3) an optional parameter provider
/// that may override field values.
/// </summary>
/// <exception cref="ExprValidationException">invalid annotation, setter failure, or unknown property</exception>
public static void PopulateObject(String operatorName, int operatorNum, String dataFlowName, IDictionary<String, Object> objectProperties, Object top, EngineImportService engineImportService, EPDataFlowOperatorParameterProvider optionalParameterProvider, IDictionary<String, Object> optionalParameterURIs)
{
    var applicableClass = top.GetType();
    var writables = PropertyHelper.GetWritableProperties(applicableClass);
    var annotatedFields = TypeHelper.FindAnnotatedFields(applicableClass, typeof(DataFlowOpParameterAttribute));
    var annotatedMethods = TypeHelper.FindAnnotatedMethods(applicableClass, typeof(DataFlowOpParameterAttribute));

    // find catch-all methods
    var catchAllMethods = new LinkedHashSet<MethodInfo>();
    if (annotatedMethods != null)
    {
        foreach (var method in annotatedMethods)
        {
            var anno = (DataFlowOpParameterAttribute)TypeHelper.GetAnnotations(
                typeof(DataFlowOpParameterAttribute), method.GetCustomAttributes(true).Cast<Attribute>().ToArray())[0];
            if (anno.All)
            {
                // A catch-all must have the shape (string name, object value).
                var parameterTypes = method.GetParameterTypes();
                if ((parameterTypes.Length == 2) && (parameterTypes[0] == typeof(String)) && (parameterTypes[1] == typeof(Object)))
                {
                    catchAllMethods.Add(method);
                    continue;
                }
                throw new ExprValidationException("Invalid annotation for catch-call");
            }
        }
    }

    // map provided values
    foreach (var property in objectProperties)
    {
        var found = false;
        var propertyName = property.Key;

        // invoke catch-all setters
        foreach (var method in catchAllMethods)
        {
            try
            {
                method.Invoke(top, new Object[] { propertyName, property.Value });
            }
            catch (MemberAccessException e)
            {
                throw new ExprValidationException("Illegal access invoking method for property '" + propertyName + "' for class " + applicableClass.Name + " method " + method.Name, e);
            }
            catch (TargetInvocationException e)
            {
                throw new ExprValidationException("Exception invoking method for property '" + propertyName + "' for class " + applicableClass.Name + " method " + method.Name + ": " + e.InnerException.Message, e);
            }
            found = true;
        }

        // The reserved class-name property is handled elsewhere.
        if (propertyName.ToLower() == CLASS_PROPERTY_NAME)
        {
            continue;
        }

        // use the writeable property descriptor (appropriate setter method) from writing the property
        var descriptor = FindDescriptor(applicableClass, propertyName, writables);
        if (descriptor != null)
        {
            var coerceProperty = CoerceProperty(propertyName, applicableClass, property.Value, descriptor.PropertyType, engineImportService, false, true);
            try
            {
                descriptor.WriteMethod.Invoke(top, new Object[] { coerceProperty });
            }
            catch (ArgumentException e)
            {
                throw new ExprValidationException("Illegal argument invoking setter method for property '" + propertyName + "' for class " + applicableClass.Name + " method " + descriptor.WriteMethod.Name + " provided value " + coerceProperty, e);
            }
            catch (MemberAccessException e)
            {
                throw new ExprValidationException("Illegal access invoking setter method for property '" + propertyName + "' for class " + applicableClass.Name + " method " + descriptor.WriteMethod.Name, e);
            }
            catch (TargetInvocationException e)
            {
                throw new ExprValidationException("Exception invoking setter method for property '" + propertyName + "' for class " + applicableClass.Name + " method " + descriptor.WriteMethod.Name + ": " + e.InnerException.Message, e);
            }
            continue;
        }

        // in .NET, it's common to name fields with an underscore prefix, this modified
        // notation is preserved in the modPropertyName
        var modPropertyName = "_" + propertyName;

        // find the field annotated with <seealso cref="GraphOpProperty" />
        foreach (var annotatedField in annotatedFields)
        {
            var anno = (DataFlowOpParameterAttribute)TypeHelper.GetAnnotations(
                typeof(DataFlowOpParameterAttribute), annotatedField.GetCustomAttributes(true).Cast<Attribute>().ToArray())[0];
            if ((anno.Name == propertyName) || (annotatedField.Name == propertyName) || (annotatedField.Name == modPropertyName))
            {
                var coerceProperty = CoerceProperty(propertyName, applicableClass, property.Value, annotatedField.FieldType, engineImportService, true, true);
                try
                {
                    annotatedField.SetValue(top, coerceProperty);
                }
                catch (Exception e)
                {
                    throw new ExprValidationException("Failed to set field '" + annotatedField.Name + "': " + e.Message, e);
                }
                found = true;
                break;
            }
        }

        if (found)
        {
            continue;
        }

        throw new ExprValidationException("Failed to find writable property '" + propertyName + "' for class " + applicableClass);
    }

    // second pass: if a parameter URI - value pairs were provided, check that
    if (optionalParameterURIs != null)
    {
        // URI keys of the form "operatorName/fieldName" target fields directly.
        foreach (var annotatedField in annotatedFields)
        {
            try
            {
                var uri = operatorName + "/" + annotatedField.Name;
                if (optionalParameterURIs.ContainsKey(uri))
                {
                    var value = optionalParameterURIs.Get(uri);
                    annotatedField.SetValue(top, value);
                    if (Log.IsDebugEnabled)
                    {
                        Log.Debug("Found parameter '" + uri + "' for data flow " + dataFlowName + " setting " + value);
                    }
                }
                else
                {
                    if (Log.IsDebugEnabled)
                    {
                        Log.Debug("Not found parameter '" + uri + "' for data flow " + dataFlowName);
                    }
                }
            }
            catch (Exception e)
            {
                throw new ExprValidationException("Failed to set field '" + annotatedField.Name + "': " + e.Message, e);
            }
        }

        // URI-keyed values are also fed through catch-all (name, value) setters.
        foreach (var method in annotatedMethods)
        {
            //var anno = (DataFlowOpParameterAttribute) TypeHelper.GetAnnotations<DataFlowOpParameterAttribute>(method.GetCustomAttributes(false))[0];
            var anno = method.GetCustomAttributes(typeof(DataFlowOpParameterAttribute), false)
                .Cast<DataFlowOpParameterAttribute>()
                .First();
            if (anno.All)
            {
                var parameterTypes = method.GetParameterTypes();
                if (parameterTypes.Length == 2 && parameterTypes[0] == typeof(string) && parameterTypes[1] == typeof(object))
                {
                    foreach (var entry in optionalParameterURIs)
                    {
                        var uri = new Uri(entry.Key, UriKind.RelativeOrAbsolute);
                        var elements = URIUtil.ParsePathElements(uri);
                        if (elements.Length < 2)
                        {
                            throw new ExprValidationException(string.Format("Failed to parse URI '{0}', expected 'operator_name/property_name' format", entry.Key));
                        }
                        if (elements[0] == operatorName)
                        {
                            try
                            {
                                method.Invoke(top, new Object[] { elements[1], entry.Value });
                            }
                            catch (ArgumentException e)
                            {
                                throw new ExprValidationException("Illegal argument invoking setter method for property '" + entry.Key + "' for class " + applicableClass.Name + " method " + method.Name, e);
                            }
                            catch (MemberAccessException e)
                            {
                                throw new ExprValidationException("Illegal access invoking setter method for property '" + entry.Key + "' for class " + applicableClass.Name + " method " + method.Name, e);
                            }
                            catch (TargetInvocationException e)
                            {
                                throw new ExprValidationException("Exception invoking setter method for property '" + entry.Key + "' for class " + applicableClass.Name + " method " + method.Name + ": " + e.InnerException.Message, e);
                            }
                        }
                    }
                }
            }
        }
    }

    // third pass: if a parameter provider is provided, use that
    if (optionalParameterProvider != null)
    {
        foreach (var annotatedField in annotatedFields)
        {
            try
            {
                var provided = annotatedField.GetValue(top);
                var value = optionalParameterProvider.Provide(new EPDataFlowOperatorParameterProviderContext(operatorName, annotatedField.Name, top, operatorNum, provided, dataFlowName));
                // Retry without the underscore prefix for .NET-style field names.
                if ((value == null) && (annotatedField.Name.StartsWith("_")))
                {
                    value = optionalParameterProvider.Provide(new EPDataFlowOperatorParameterProviderContext(operatorName, annotatedField.Name.Substring(1), top, operatorNum, provided, dataFlowName));
                }
                if (value != null)
                {
                    annotatedField.SetValue(top, value);
                }
            }
            catch (Exception e)
            {
                throw new ExprValidationException("Failed to set field '" + annotatedField.Name + "': " + e.Message, e);
            }
        }
    }
}
/// <summary>
/// Benchmarks ZKRMStateStore.LoadState: parses command-line sizing options,
/// stores synthetic apps/attempts, measures LoadState, then cleans up.
/// Returns 0 on success, -1 on any failure.
/// </summary>
public virtual int Run(string[] args)
{
    Log.Info("Starting ZKRMStateStorePerf ver." + version);
    int numApp = ZkPerfNumAppDefault;
    int numAppAttemptPerApp = ZkPerfNumAppattemptPerApp;
    string hostPort = null;
    bool launchLocalZK = true;
    if (args.Length == 0)
    {
        System.Console.Error.WriteLine("Missing arguments.");
        return (-1);
    }
    for (int i = 0; i < args.Length; i++)
    {
        // parse command line
        if (Sharpen.Runtime.EqualsIgnoreCase(args[i], "-appsize"))
        {
            numApp = System.Convert.ToInt32(args[++i]);
        }
        else
        {
            if (Sharpen.Runtime.EqualsIgnoreCase(args[i], "-appattemptsize"))
            {
                numAppAttemptPerApp = System.Convert.ToInt32(args[++i]);
            }
            else
            {
                if (Sharpen.Runtime.EqualsIgnoreCase(args[i], "-hostPort"))
                {
                    hostPort = args[++i];
                    launchLocalZK = false;
                }
                else
                {
                    if (Sharpen.Runtime.EqualsIgnoreCase(args[i], "-workingZnode"))
                    {
                        workingZnode = args[++i];
                    }
                    else
                    {
                        System.Console.Error.WriteLine("Illegal argument: " + args[i]);
                        return (-1);
                    }
                }
            }
        }
    }
    if (launchLocalZK)
    {
        try
        {
            SetUp();
        }
        catch (Exception e)
        {
            System.Console.Error.WriteLine("failed to setup. : " + e.Message);
            return (-1);
        }
    }
    InitStore(hostPort);
    long submitTime = Runtime.CurrentTimeMillis();
    long startTime = Runtime.CurrentTimeMillis() + 1234;
    AList<ApplicationId> applicationIds = new AList<ApplicationId>();
    AList<RMApp> rmApps = new AList<RMApp>();
    AList<ApplicationAttemptId> attemptIds = new AList<ApplicationAttemptId>();
    Dictionary<ApplicationId, ICollection<ApplicationAttemptId>> appIdsToAttemptId =
        new Dictionary<ApplicationId, ICollection<ApplicationAttemptId>>();
    RMStateStoreTestBase.TestDispatcher dispatcher = new RMStateStoreTestBase.TestDispatcher();
    store.SetRMDispatcher(dispatcher);
    for (int i_1 = 0; i_1 < numApp; i_1++)
    {
        ApplicationId appId = ApplicationId.NewInstance(clusterTimeStamp, i_1);
        applicationIds.AddItem(appId);
        AList<ApplicationAttemptId> attemptIdsForThisApp = new AList<ApplicationAttemptId>();
        for (int j = 0; j < numAppAttemptPerApp; j++)
        {
            ApplicationAttemptId attemptId = ApplicationAttemptId.NewInstance(appId, j);
            attemptIdsForThisApp.AddItem(attemptId);
        }
        // LinkedHashSet keeps the per-app attempts in insertion order.
        appIdsToAttemptId[appId] = new LinkedHashSet(attemptIdsForThisApp);
        Sharpen.Collections.AddAll(attemptIds, attemptIdsForThisApp);
    }
    foreach (ApplicationId appId_1 in applicationIds)
    {
        RMApp app = null;
        try
        {
            app = StoreApp(store, appId_1, submitTime, startTime);
        }
        catch (Exception e)
        {
            System.Console.Error.WriteLine("failed to create Application Znode. : " + e.Message);
            return (-1);
        }
        WaitNotify(dispatcher);
        rmApps.AddItem(app);
    }
    foreach (ApplicationAttemptId attemptId_1 in attemptIds)
    {
        Org.Apache.Hadoop.Security.Token.Token<AMRMTokenIdentifier> tokenId =
            GenerateAMRMToken(attemptId_1, appTokenMgr);
        SecretKey clientTokenKey = clientToAMTokenMgr.CreateMasterKey(attemptId_1);
        try
        {
            StoreAttempt(store, attemptId_1, ContainerId.NewContainerId(attemptId_1, 0L).ToString(),
                tokenId, clientTokenKey, dispatcher);
        }
        catch (Exception e)
        {
            System.Console.Error.WriteLine("failed to create AppAttempt Znode. : " + e.Message);
            return (-1);
        }
    }
    // The measured section: a single LoadState over everything stored above.
    long storeStart = Runtime.CurrentTimeMillis();
    try
    {
        store.LoadState();
    }
    catch (Exception e)
    {
        // BUG FIX: message previously read "failed to locaState" (typo).
        System.Console.Error.WriteLine("failed to loadState from ZKRMStateStore. : " + e.Message);
        return (-1);
    }
    long storeEnd = Runtime.CurrentTimeMillis();
    long loadTime = storeEnd - storeStart;
    string resultMsg = "ZKRMStateStore takes " + loadTime + " msec to loadState.";
    Log.Info(resultMsg);
    System.Console.Out.WriteLine(resultMsg);
    // cleanup
    try
    {
        foreach (RMApp app in rmApps)
        {
            ApplicationStateData appState = ApplicationStateData.NewInstance(app.GetSubmitTime(),
                app.GetStartTime(), app.GetApplicationSubmissionContext(), app.GetUser());
            ApplicationId appId = app.GetApplicationId();
            IDictionary m = Org.Mockito.Mockito.Mock<IDictionary>();
            // BUG FIX: previously indexed with 'appId_1', the foreach variable
            // of an earlier, already-closed loop (out of scope here); each
            // iteration must use this app's own id.
            Org.Mockito.Mockito.When(m.Keys).ThenReturn(appIdsToAttemptId[appId]);
            appState.attempts = m;
            store.RemoveApplicationStateInternal(appState);
        }
    }
    catch (Exception e)
    {
        System.Console.Error.WriteLine("failed to cleanup. : " + e.Message);
        return (-1);
    }
    return (0);
}
/// <summary>
/// Builds the inheritance information (implemented interfaces and base types) for
/// <paramref name="type"/> by walking the type hierarchy breadth-first.
/// Generic-instance types are resolved through <c>GenericInstanceFactory</c> so that
/// interface references carry matched generic arguments.
/// </summary>
/// <param name="type">The type whose inheritance graph is collected.</param>
/// <returns>An <c>InheritanceInfo</c> with interfaces (in discovery order), an interface set, and all base types.</returns>
private static InheritanceInfo CreateInheritanceInfo(Type type)
{
    // Insertion-ordered so the Interfaces array reflects discovery order.
    LinkedHashSet<Type> intf = new LinkedHashSet<Type>();
    Java.Util.HashSet<Type> baseTypes = new Java.Util.HashSet<Type>();
    // Every type ultimately derives from object.
    baseTypes.Add(typeof(object));
    // Note that JavaGetInterfaces will only return interfaces declared by the current type,
    // while .NET returns a flattened map of all interfaces.
    // http://stackoverflow.com/questions/6616055/get-all-derived-interfaces-of-a-class
    // http://stackoverflow.com/questions/9793242/type-getinterfaces-for-declared-interfaces-only
    Java.Util.IQueue<Type> toVisit = new Java.Util.LinkedList<Type>();
    toVisit.Add(type);
    // BFS over the pending-type queue; AddInterfaces enqueues newly found interfaces.
    while (toVisit.Peek() != null)
    {
        var currentType = toVisit.Poll();
        var gti = GenericInstanceFactory.GetGenericTypeInfo(currentType);
        // For generic instances, interface-ness is decided on the generic definition.
        bool isInterface = gti != null ? gti.TypeDefinition.JavaIsInterface() : currentType.JavaIsInterface();
        if (!isInterface)
        {
            var baseType = currentType.BaseType;
            // object is already recorded; skip it to terminate the base chain.
            if (baseType != null && baseType != typeof(object))
            {
                toVisit.Add(baseType);
                baseTypes.Add(baseType);
            }
        }
        if (gti == null)
        {
            // Plain (non-generic-instance) type: take its declared interfaces directly.
            AddInterfaces(currentType.JavaGetInterfaces(), intf, toVisit);
            continue;
        }
        var typeDef = gti.TypeDefinition;
        var interfaces = typeDef.JavaGetInterfaces();
        var genericInstanceClass = typeDef.GetAnnotation<ITypeReflectionInfo>(typeof(ITypeReflectionInfo));
        if (genericInstanceClass == null)
        {
            // No reflection info annotation: interfaces cannot be specialized, use as declared.
            AddInterfaces(interfaces, intf, toVisit);
            continue;
        }
        var def = genericInstanceClass.GenericDefinitions();
        if (def.Length == 0)
        {
            // Annotation present but no generic definitions to match against.
            AddInterfaces(interfaces, intf, toVisit);
            continue;
        }
        // Substitute the current instance's generic arguments into each declared interface.
        for (int i = 0; i < interfaces.Length; ++i)
            interfaces[i] = ToMatchedGenericInstanceType(interfaces[i], currentType, def);
        AddInterfaces(interfaces, intf, toVisit);
    }
    return new InheritanceInfo
    {
        Interfaces = new JavaCollectionWrapper<Type>(intf).ToArray(),
        InterfacesSet = new Java.Util.HashSet<Type>(intf),
        BaseTypes = baseTypes
    };
}
/// <summary>
/// Verifies iterator behavior of the <c>hs</c> fixture set: every non-null element
/// unboxes to its iteration position, the iteration count matches Size(), and a
/// set containing only null yields null from its iterator.
/// </summary>
public void TestIterator()
{
    Iterator<Object> i = hs.Iterator();
    // x counts elements actually yielded by the iterator.
    int x = 0;
    int j;
    for (j = 0; i.HasNext; j++)
    {
        Object oo = i.Next();
        if (oo != null)
        {
            // Assumes the fixture was populated with boxed ints equal to their
            // insertion index — TODO confirm against the fixture's SetUp.
            int ii = (int) oo;
            Assert.IsTrue(ii == j, "Incorrect element found");
        }
        else
        {
            // A null element must still be reported as contained.
            Assert.IsTrue(hs.Contains(oo), "Cannot find null");
        }
        ++x;
    }
    // The iterator must yield exactly Size() elements.
    Assert.IsTrue(hs.Size() == x, "Returned iteration of incorrect size");
    // A set holding only null must hand null back through its iterator.
    LinkedHashSet<Object> s = new LinkedHashSet<Object>();
    s.Add(null);
    Assert.IsNull(s.Iterator().Next(), "Cannot handle null");
}
/// <summary>
/// Resolves the target method on an operator class that should receive input from a
/// logical channel. For punctuation, any method named "OnSignal" is used. Otherwise
/// candidate methods named "OnInput" (or "On" + the consuming stream alias) are matched
/// against the channel's output-port type in order of preference: underlying-type
/// parameter, (streamNum, underlying), Object / varargs Object[], (int, Object / Object[]),
/// and finally an exact per-property signature for object-array event types.
/// </summary>
/// <param name="operatorName">Operator name, used only for the error message.</param>
/// <param name="target">Operator class to search for a handler method.</param>
/// <param name="channelDesc">Channel whose output port describes the expected input.</param>
/// <param name="isPunctuation">True to bind the punctuation (OnSignal) handler instead.</param>
/// <returns>The binding descriptor, or null when punctuation has no OnSignal method.</returns>
/// <exception cref="ExprValidationException">When no eligible OnInput method matches.</exception>
private static LogicalChannelBindingMethodDesc FindMatchingMethod(
    string operatorName,
    Type target,
    LogicalChannel channelDesc,
    bool isPunctuation)
{
    if (isPunctuation) {
        foreach (var method in target.GetMethods()) {
            if (method.Name.Equals("OnSignal")) {
                return new LogicalChannelBindingMethodDesc(method, LogicalChannelBindingTypePassAlong.INSTANCE);
            }
        }
        // No signal handler is a valid outcome for punctuation.
        return null;
    }
    var outputPort = channelDesc.OutputPort;
    Type[] expectedIndividual;
    Type expectedUnderlying;
    EventType expectedUnderlyingType;
    var typeDesc = outputPort.GraphTypeDesc;
    if (typeDesc.IsWildcard) {
        // Wildcard port: no expectations, only the generic Object/Object[] matches apply.
        expectedIndividual = new Type[0];
        expectedUnderlying = null;
        expectedUnderlyingType = null;
    }
    else {
        // Collect the per-property types for the exact-signature match below.
        expectedIndividual = new Type[typeDesc.EventType.PropertyNames.Length];
        var i = 0;
        foreach (var descriptor in typeDesc.EventType.PropertyDescriptors) {
            expectedIndividual[i] = descriptor.PropertyType;
            i++;
        }
        expectedUnderlying = typeDesc.EventType.UnderlyingType;
        expectedUnderlyingType = typeDesc.EventType;
    }
    // An aliased consuming stream also accepts a method named "On<alias>".
    string channelSpecificMethodName = null;
    if (channelDesc.ConsumingOptStreamAliasName != null) {
        channelSpecificMethodName = "On" + channelDesc.ConsumingOptStreamAliasName;
    }
    var methods = target.GetMethods();
    foreach (var method in methods) {
        var eligible = method.Name.Equals("OnInput");
        if (!eligible && method.Name.Equals(channelSpecificMethodName)) {
            eligible = true;
        }
        if (!eligible) {
            continue;
        }
        // handle Object[]
        var paramTypes = method.GetParameterTypes();
        var numParams = paramTypes.Length;
        if (expectedUnderlying != null) {
            // Preferred: a single parameter assignable from the event's underlying type.
            if (numParams == 1 && TypeHelper.IsAssignmentCompatible(expectedUnderlying, paramTypes[0])) {
                return new LogicalChannelBindingMethodDesc(method, LogicalChannelBindingTypePassAlong.INSTANCE);
            }
            // Two-parameter form: (int streamNum, underlying).
            if (numParams == 2 && paramTypes[0].GetBoxedType() == typeof(int?) && TypeHelper.IsAssignmentCompatible(expectedUnderlying, paramTypes[1])) {
                return new LogicalChannelBindingMethodDesc(method, new LogicalChannelBindingTypePassAlongWStream(channelDesc.ConsumingOpStreamNum));
            }
        }
        // Generic catch-alls: OnInput(object) or varargs OnInput(params object[]).
        if (numParams == 1 && (paramTypes[0] == typeof(object) || paramTypes[0] == typeof(object[]) && method.IsVarArgs())) {
            return new LogicalChannelBindingMethodDesc(method, LogicalChannelBindingTypePassAlong.INSTANCE);
        }
        // Same catch-alls prefixed by an int stream number.
        if (numParams == 2 && paramTypes[0] == typeof(int) && (paramTypes[1] == typeof(object) || paramTypes[1] == typeof(object[]) && method.IsVarArgs())) {
            return new LogicalChannelBindingMethodDesc(method, new LogicalChannelBindingTypePassAlongWStream(channelDesc.ConsumingOpStreamNum));
        }
        // if exposing a method that exactly matches each property type in order, use that, i.e. "OnInput(String p0, int p1)"
        if (expectedUnderlyingType is ObjectArrayEventType && TypeHelper.IsSignatureCompatible(expectedIndividual, paramTypes)) {
            return new LogicalChannelBindingMethodDesc(method, LogicalChannelBindingTypeUnwind.INSTANCE);
        }
    }
    // Nothing matched: report the acceptable parameter-type choices.
    // NOTE(review): "method on for operator" reads like a typo carried over from the
    // upstream port — confirm before changing, since tests may assert on this text.
    ISet<string> choices = new LinkedHashSet<string>();
    choices.Add(typeof(object).Name);
    choices.Add("Object[]");
    if (expectedUnderlying != null) {
        choices.Add(expectedUnderlying.Name);
    }
    throw new ExprValidationException(
        "Failed to find OnInput method on for operator '" + operatorName + "' class " + target.Name +
        ", expected an OnInput method that takes any of {" + CollectionUtil.ToString(choices) + "}");
}
/// <summary>
/// Translates a parsed create-schema grammar context into a <c>CreateSchemaDesc</c>:
/// schema name, column definitions, optional model-after (variant) type names, and the
/// optional qualifiers inherits / starttimestamp / endtimestamp / copyfrom.
/// </summary>
/// <param name="ctx">The create-schema parse-tree context.</param>
/// <param name="assignedType">The assigned-type keyword parsed for this schema.</param>
/// <returns>The assembled schema descriptor.</returns>
/// <exception cref="EPException">When an unrecognized qualifier keyword is encountered.</exception>
private static CreateSchemaDesc GetSchemaDesc(
    EsperEPL2GrammarParser.CreateSchemaDefContext ctx,
    AssignedType assignedType)
{
    var name = ctx.name.Text;
    var columns = GetColTypeList(ctx.createColumnList());

    // Model-after type names; variant schemas may list more than one.
    ISet<string> modelAfterTypes = new LinkedHashSet<string>();
    var variantList = ctx.variantList();
    if (variantList != null)
    {
        foreach (var variantCtx in variantList.variantListElement())
        {
            modelAfterTypes.Add(variantCtx.GetText());
        }
    }

    // Optional qualifiers following the schema definition.
    string startTimestamp = null;
    string endTimestamp = null;
    ISet<string> inherited = new LinkedHashSet<string>();
    ISet<string> copyFrom = new LinkedHashSet<string>();
    var qualifiers = ctx.createSchemaQual();
    if (qualifiers != null)
    {
        foreach (var qualCtx in qualifiers)
        {
            var qualName = qualCtx.i.Text.ToLowerInvariant();
            var cols = ASTUtil.GetIdentList(qualCtx.columnList());
            if (string.Equals(qualName, "inherits", StringComparison.InvariantCultureIgnoreCase))
            {
                inherited.AddAll(cols);
            }
            else if (string.Equals(qualName, "starttimestamp", StringComparison.InvariantCultureIgnoreCase))
            {
                startTimestamp = cols[0];
            }
            else if (string.Equals(qualName, "endtimestamp", StringComparison.InvariantCultureIgnoreCase))
            {
                endTimestamp = cols[0];
            }
            else if (string.Equals(qualName, "copyfrom", StringComparison.InvariantCultureIgnoreCase))
            {
                copyFrom.AddAll(cols);
            }
            else
            {
                throw new EPException(
                    "Expected 'inherits', 'starttimestamp', 'endtimestamp' or 'copyfrom' keyword after create-schema clause but encountered '" + qualName + "'");
            }
        }
    }

    return new CreateSchemaDesc(name, modelAfterTypes, columns, inherited, assignedType, startTimestamp, endTimestamp, copyFrom);
}
/// <summary>
/// Creates an entry with empty incoming and outgoing operator-number sets.
/// Insertion-ordered sets keep dependency enumeration deterministic.
/// </summary>
public OperatorDependencyEntry()
{
    Incoming = new LinkedHashSet<int>();
    Outgoing = new LinkedHashSet<int>();
}
/// <summary>
/// Applies a relation update item (added/removed object references) to the given entity's
/// relation member. If the relation's ObjRefs are not yet available the work is deferred:
/// a prefetch request is queued and this method re-enqueues itself as a runnable. The final
/// value materialization (fetching related entities from the state cache and setting the
/// member) is also deferred into <paramref name="runnables"/>.
/// </summary>
/// <param name="entity">Entity whose relation member is updated.</param>
/// <param name="rui">The update item carrying added/removed object references.</param>
/// <param name="isUpdate">Passed through on the deferred re-invocation; not read directly here.</param>
/// <param name="metaData">Metadata used to resolve the relation member by name.</param>
/// <param name="toPrefetch">Collects value-holder refs that must be prefetched before retry.</param>
/// <param name="toFetchFromCache">Collects ObjRefs whose entities must be in the state cache.</param>
/// <param name="checkBaseState">When true, inconsistent add/remove raises an optimistic-lock error.</param>
/// <param name="runnables">Deferred work queue; this method appends continuations to it.</param>
protected void ApplyRelationUpdateItem(IObjRefContainer entity, IRelationUpdateItem rui, bool isUpdate, IEntityMetaData metaData, IList<DirectValueHolderRef> toPrefetch, List<IObjRef> toFetchFromCache, bool checkBaseState, IList<IBackgroundWorkerDelegate> runnables)
{
    IObjRefHelper objRefHelper = this.ObjRefHelper;
    String memberName = rui.MemberName;
    int relationIndex = metaData.GetIndexByRelationName(memberName);
    RelationMember relationMember = metaData.RelationMembers[relationIndex];
    IObjRef[] existingORIs;
    if (entity.Is__Initialized(relationIndex))
    {
        // Relation already loaded: derive the current ObjRefs from the live value.
        existingORIs = ListUtil.ToArray(ObjRefHelper.ExtractObjRefList(relationMember.GetValue(entity), null));
    }
    else
    {
        existingORIs = entity.Get__ObjRefs(relationIndex);
        if (existingORIs == null)
        {
            // ObjRefs unknown: request a prefetch and retry this whole method later.
            toPrefetch.Add(new DirectValueHolderRef(entity, relationMember, true));
            runnables.Add(new IBackgroundWorkerDelegate(delegate()
            {
                ApplyRelationUpdateItem(entity, rui, isUpdate, metaData, toPrefetch, toFetchFromCache, checkBaseState, runnables);
            }));
            return;
        }
    }
    IObjRef[] addedORIs = rui.AddedORIs;
    IObjRef[] removedORIs = rui.RemovedORIs;
    IObjRef[] newORIs;
    if (existingORIs.Length == 0)
    {
        // Removing from an empty relation is a consistency violation when base-state checking is on.
        if (checkBaseState && removedORIs != null)
        {
            throw new Exception("Removing from empty member");
        }
        newORIs = addedORIs != null ? (IObjRef[])addedORIs.Clone() : ObjRef.EMPTY_ARRAY;
        // Clone each ObjRef so the update item's instances are never shared.
        for (int a = newORIs.Length; a-- > 0;)
        {
            newORIs[a] = CloneObjRef(newORIs[a], false);
        }
    }
    else
    {
        // Set to efficiently remove entries
        LinkedHashSet<IObjRef> existingORIsSet = new LinkedHashSet<IObjRef>(existingORIs);
        if (removedORIs != null)
        {
            foreach (IObjRef removedORI in removedORIs)
            {
                IObjRef clonedObjRef = CloneObjRef(removedORI, false);
                // Removing an entry that is not present means our base state is stale.
                if (existingORIsSet.Remove(clonedObjRef) || !checkBaseState)
                {
                    continue;
                }
                throw OptimisticLockUtil.ThrowModified(objRefHelper.EntityToObjRef(entity), null, entity);
            }
        }
        if (addedORIs != null)
        {
            foreach (IObjRef addedORI in addedORIs)
            {
                IObjRef clonedObjRef = CloneObjRef(addedORI, false);
                // Adding an entry that already exists likewise signals a concurrent modification.
                if (existingORIsSet.Add(clonedObjRef) || !checkBaseState)
                {
                    continue;
                }
                throw OptimisticLockUtil.ThrowModified(objRefHelper.EntityToObjRef(entity), null, entity);
            }
        }
        if (existingORIsSet.Count == 0)
        {
            newORIs = ObjRef.EMPTY_ARRAY;
        }
        else
        {
            newORIs = existingORIsSet.ToArray();
        }
    }
    if (!entity.Is__Initialized(relationIndex))
    {
        // Relation still lazy: only record the new ObjRefs, no value materialization needed.
        entity.Set__ObjRefs(relationIndex, newORIs);
        return;
    }
    // Relation is loaded: ensure the referenced entities are cached, then set the value later.
    toFetchFromCache.AddRange(newORIs);
    runnables.Add(new IBackgroundWorkerDelegate(delegate()
    {
        ICache stateCache = cloneStateTL.Value.incrementalState.GetStateCache();
        // FailEarly: by this point all newORIs are expected to be in the state cache.
        IList<Object> objects = stateCache.GetObjects(newORIs, CacheDirective.FailEarly);
        Object value;
        if (relationMember.IsToMany)
        {
            // To-many relation
            Object coll = ListUtil.CreateObservableCollectionOfType(relationMember.RealType, objects.Count);
            ListUtil.FillList(coll, objects);
            value = coll;
        }
        else
        {
            // To-one relation
            value = objects.Count > 0 ? objects[0] : null;
        }
        relationMember.SetValue(entity, value);
    }));
}
/// <summary>
/// Determines the serde forge for a single event property, dispatching on the runtime
/// shape of <paramref name="propertyType"/>: null (skip), CLR <see cref="Type"/> (with
/// special handling for JSON catch-all types and the internal reserved property),
/// EventType / EventType[], TypeBeanOrUnderlying / TypeBeanOrUnderlying[], or a nested
/// string-to-object map (recursed per entry).
/// </summary>
/// <param name="eventTypeSerde">The event type the property belongs to.</param>
/// <param name="propertyName">Name of the property being forged.</param>
/// <param name="propertyType">Property type descriptor; see summary for accepted shapes.</param>
/// <param name="raw">Statement raw info passed through to the resolver.</param>
/// <param name="resolver">Resolver for plain CLR-typed properties.</param>
/// <returns>The forge plus any event types referenced by the property.</returns>
/// <exception cref="EPException">When the property type shape is not recognized.</exception>
public static SerdeEventPropertyDesc ForgeForEventProperty(
    EventType eventTypeSerde,
    string propertyName,
    object propertyType,
    StatementRawInfo raw,
    SerdeCompileTimeResolver resolver)
{
    DataInputOutputSerdeForge forge;
    if (propertyType == null) {
        // Untyped property: nothing to serialize, use the skip serde.
        return(new SerdeEventPropertyDesc(new DataInputOutputSerdeForgeSingleton(typeof(DIOSkipSerde)), EmptySet<EventType>.Instance));
    }
    if (propertyType is Type propertyTypeType) {
        // handle special Json catch-all types
        if (eventTypeSerde is JsonEventType) {
            forge = null;
            if (propertyTypeType == typeof(IDictionary<string, object>)) {
                forge = new DataInputOutputSerdeForgeSingleton(typeof(DIOJsonObjectSerde));
            }
            else if (propertyTypeType == typeof(object[])) {
                forge = new DataInputOutputSerdeForgeSingleton(typeof(DIOJsonArraySerde));
            }
            else if (propertyTypeType == typeof(object)) {
                forge = new DataInputOutputSerdeForgeSingleton(typeof(DIOJsonAnyValueSerde));
            }
            if (forge != null) {
                return(new SerdeEventPropertyDesc(forge, EmptySet<EventType>.Instance));
            }
        }
        // handle all Class-type properties
        var typedProperty = (Type)propertyType;
        if (typedProperty == typeof(object) && propertyName.Equals(INTERNAL_RESERVED_PROPERTY)) {
            forge = new DataInputOutputSerdeForgeSingleton(typeof(DIOSkipSerde)); // for expression data window or others that include transient references in the field
        }
        else {
            forge = resolver.SerdeForEventProperty(typedProperty, eventTypeSerde.Name, propertyName, raw);
        }
        return(new SerdeEventPropertyDesc(forge, EmptySet<EventType>.Instance));
    }
    if (propertyType is EventType) {
        // Property holds a single event of a known type.
        var eventType = (EventType)propertyType;
        Func<DataInputOutputSerdeForgeParameterizedVars, CodegenExpression> func = vars => ResolveTypeCodegenGivenResolver(eventType, vars.OptionalEventTypeResolver);
        forge = new DataInputOutputSerdeForgeEventSerde("NullableEvent", func);
        return(new SerdeEventPropertyDesc(forge, Collections.SingletonSet(eventType)));
    }
    else if (propertyType is EventType[]) {
        // Array-of-event property; the element type is taken from the first slot.
        var eventType = ((EventType[])propertyType)[0];
        Func<DataInputOutputSerdeForgeParameterizedVars, CodegenExpression> func = vars => ResolveTypeCodegenGivenResolver(eventType, vars.OptionalEventTypeResolver);
        forge = new DataInputOutputSerdeForgeEventSerde("NullableEventArray", func);
        return(new SerdeEventPropertyDesc(forge, Collections.SingletonSet(eventType)));
    }
    else if (propertyType is TypeBeanOrUnderlying) {
        // Event-or-underlying wrapper: serde accepts either representation.
        var eventType = ((TypeBeanOrUnderlying)propertyType).EventType;
        Func<DataInputOutputSerdeForgeParameterizedVars, CodegenExpression> func = vars => ResolveTypeCodegenGivenResolver(eventType, vars.OptionalEventTypeResolver);
        forge = new DataInputOutputSerdeForgeEventSerde("NullableEventOrUnderlying", func);
        return(new SerdeEventPropertyDesc(forge, Collections.SingletonSet(eventType)));
    }
    else if (propertyType is TypeBeanOrUnderlying[]) {
        var eventType = ((TypeBeanOrUnderlying[])propertyType)[0].EventType;
        Func<DataInputOutputSerdeForgeParameterizedVars, CodegenExpression> func = vars => ResolveTypeCodegenGivenResolver(eventType, vars.OptionalEventTypeResolver);
        forge = new DataInputOutputSerdeForgeEventSerde("NullableEventArrayOrUnderlying", func);
        return(new SerdeEventPropertyDesc(forge, Collections.SingletonSet(eventType)));
    }
    else if (propertyType is IDictionary<string, object> keyValueProperties) {
        // Nested map-typed property: forge each entry recursively.
        var keys = new string[keyValueProperties.Count];
        var serdes = new DataInputOutputSerdeForge[keyValueProperties.Count];
        var index = 0;
        var nestedTypes = new LinkedHashSet<EventType>();
        // Rewrite all properties where the value is a string. First, gather all instances that need
        // to be rewritten into the class that matches the type.
        // NOTE: this mutates the caller's dictionary; ToList() materializes the matching
        // entries first so the mutation does not invalidate enumeration.
        keyValueProperties
            .Where(entry => entry.Value is string)
            .ToList()
            .ForEach(
                entry => {
                    var value = entry.Value.ToString()?.Trim();
                    var clazz = TypeHelper.GetPrimitiveTypeForName(value);
                    if (clazz != null) {
                        keyValueProperties[entry.Key] = clazz;
                    }
                });
        foreach (var entry in keyValueProperties) {
            keys[index] = entry.Key;
            var desc = ForgeForEventProperty(eventTypeSerde, entry.Key, entry.Value, raw, resolver);
            nestedTypes.AddAll(desc.NestedTypes);
            serdes[index] = desc.Forge;
            index++;
        }
        var functions = new Func<DataInputOutputSerdeForgeParameterizedVars, CodegenExpression>[2];
        functions[0] = vars => Constant(keys);
        functions[1] = vars => DataInputOutputSerdeForgeExtensions.CodegenArray(serdes, vars.Method, vars.Scope, vars.OptionalEventTypeResolver);
        forge = new DataInputOutputSerdeForgeParameterized(typeof(DIOMapPropertySerde).Name, functions);
        return(new SerdeEventPropertyDesc(forge, nestedTypes));
    }
    else {
        throw new EPException(
            "Failed to determine serde for unrecognized property value type '" + propertyType + "' for property '" + propertyName + "' of type '" + eventTypeSerde.Name + "'");
    }
}
/// <summary>
/// Populates this fixture object with its default test data: collections of various
/// flavors (sets, maps, arrays, bags) plus component and composite-element values.
/// </summary>
public void SetDefaults()
{
    // Midnight today, used for the first TopComponents date array below.
    DateTime today = new DateTime(DateTime.Now.Year, DateTime.Now.Month, DateTime.Now.Day);

    StringSet = new HashSet<string> { "foo", "bar", "baz" };

    StringDateMap = new SortedList
    {
        { "now", DateTime.Now },
        { "never", null },  // value is persisted since NH-2199
        // according to SQL Server the big bag happened in 1753 ;)
        { "big bang", new DateTime(1753, 01, 01) }
    };
    //StringDateMap.Add( "millenium", new DateTime( 2000, 01, 01 ) );

    StringArray = StringSet.ToArray();
    StringList = new ArrayList(StringArray);
    IntArray = new[] { 1, 3, 3, 7 };
    FooArray = new Foo[0];

    Customs = new ArrayList
    {
        new String[] { "foo", "bar" },
        new String[] { "A", "B" },
        new String[] { "1", "2" }
    };

    FooSet = new HashSet<FooProxy>();

    Components = new[]
    {
        new FooComponent("foo", 42, null, null),
        new FooComponent("bar", 88, null, new FooComponent("sub", 69, null, null))
    };

    TimeArray = new[]
    {
        new DateTime(),
        new DateTime(),
        new DateTime(),
        // H2.1 has null here, but it's illegal on .NET
        new DateTime(0)
    };

    Count = 667;
    Name = "Bazza";

    TopComponents = new ArrayList
    {
        new FooComponent("foo", 11, new[] { today, new DateTime(2123, 1, 1) }, null),
        new FooComponent("bar", 22, new[] { new DateTime(2007, 2, 3), new DateTime(1945, 6, 1) }, null),
        null
    };

    // A bag permits duplicates, unlike the sets above.
    Bag = new ArrayList { "duplicate", "duplicate", "duplicate", "unique" };

    CompositeElement ce = new CompositeElement();
    ce.Foo = "foo";
    ce.Bar = "bar";
    CompositeElement ce2 = new CompositeElement();
    ce2.Foo = "fooxxx";
    ce2.Bar = "barxxx";
    Cached = new LinkedHashSet<CompositeElement> { ce, ce2 };

    CachedMap = new SortedList { { this, ce } };
}
/// <summary>
/// Returns the environment objects whose location lies within <paramref name="radius"/>
/// of the agent's current location, excluding the agent itself.
/// </summary>
/// <param name="agent">The agent at the center of the search.</param>
/// <param name="radius">Search radius around the agent's location.</param>
/// <returns>An insertion-ordered set of nearby objects, without the agent.</returns>
public HashSet<EnvironmentObject> getObjectsNear(Agent agent, int radius)
{
    HashSet<EnvironmentObject> nearby = new LinkedHashSet<EnvironmentObject>();
    XYLocation center = getCurrentLocationFor(agent);
    foreach (XYLocation location in objsAtLocation.keySet())
    {
        if (!withinRadius(radius, center, location))
        {
            continue;
        }
        nearby.AddRange(objsAtLocation.get(location));
    }
    // The agent occupies a location within its own radius; drop it from the answer.
    nearby.remove(agent);
    return nearby;
}
/// <summary>
/// create a VCF header from a set of header record lines
/// </summary>
/// <param name="headerStrings"> a list of strings that represent all the ## and # entries </param>
/// <param name="version"> the VCF version the header claims; stored on this parser </param>
/// <returns> a VCFHeader object </returns>
/// <exception cref="VCFParsingError"> when the column header line is malformed </exception>
protected internal virtual VCFHeader parseHeaderFromLines (IList<string> headerStrings, VCFHeaderVersion version)
{
    this.version = version;
    // Insertion-ordered so header lines keep their original file order.
    ISet<VCFHeaderLine> metaData = new LinkedHashSet<VCFHeaderLine> ();
    ISet<string> sampleNames = new LinkedHashSet<string> ();
    int contigCounter = 0;
    // iterate over all the passed in strings
    foreach (string str in headerStrings) {
        if (!str.StartsWith (VCFHeader.METADATA_INDICATOR)) {
            //presumably the #CHROM POS ID REF ALT QUAL FILTER INFO etc. line
            string[] strings = str.Substring (1).Split (VCFConstants.FIELD_SEPARATOR_CHAR);
            //check for null last string, grrr...
            if (String.IsNullOrEmpty (strings.Last ())) {
                strings = strings.Take (strings.Length - 1).ToArray ();
            }
            if (strings.Length < VCFHeader.HEADER_FIELDS.Length) {
                throw new VCFParsingError ("There are not enough columns present in the header line: " + str);
            }
            //Verify Arrays: every fixed column name must appear in its expected position.
            var misMatchedColumns = Enumerable.Range (0, VCFHeader.HEADER_FIELDS.Length).Where (x => VCFHeader.HEADER_FIELDS [x] != strings [x]).Select (x => strings [x]).ToArray ();
            if (misMatchedColumns.Length > 0) {
                throw new VCFParsingError ("We were not expecting column name '" + misMatchedColumns [0] + " in that position");
            }
            int arrayIndex = VCFHeader.HEADER_FIELDS.Length; //start after verified columns
            bool sawFormatTag = false;
            if (arrayIndex < strings.Length) {
                // A FORMAT column must precede any sample columns.
                if (!strings [arrayIndex].Equals ("FORMAT")) {
                    throw new VCFParsingError ("we were expecting column name 'FORMAT' but we saw '" + strings [arrayIndex] + "'");
                }
                sawFormatTag = true;
                arrayIndex++;
            }
            // Everything after FORMAT is a sample name.
            while (arrayIndex < strings.Length) {
                sampleNames.Add (strings [arrayIndex++]);
            }
            if (sawFormatTag && sampleNames.Count == 0) {
                throw new VCFParsingError ("The FORMAT field was provided but there is no genotype/sample data");
            }
        } else {
            // ## metadata line: dispatch on the record prefix. The Substring offsets
            // skip the "##<TAG>=" prefix — presumably 7 = "##INFO=", 9 = "##FILTER="/
            // "##FORMAT="/"##contig=", 6 = "##ALT="; TODO confirm against VCFConstants.
            if (str.StartsWith (VCFConstants.INFO_HEADER_START)) {
                VCFInfoHeaderLine info = new VCFInfoHeaderLine (str.Substring (7), version);
                metaData.Add (info);
            } else if (str.StartsWith (VCFConstants.FILTER_HEADER_START)) {
                VCFFilterHeaderLine filter = new VCFFilterHeaderLine (str.Substring (9), version);
                metaData.Add (filter);
            } else if (str.StartsWith (VCFConstants.FORMAT_HEADER_START)) {
                VCFFormatHeaderLine format = new VCFFormatHeaderLine (str.Substring (9), version);
                metaData.Add (format);
            } else if (str.StartsWith (VCFConstants.CONTIG_HEADER_START)) {
                // Contigs are numbered in the order encountered.
                VCFContigHeaderLine contig = new VCFContigHeaderLine (str.Substring (9), version, VCFConstants.CONTIG_HEADER_START.Substring (2), contigCounter++);
                metaData.Add (contig);
            } else if (str.StartsWith (VCFConstants.ALT_HEADER_START)) {
                //TODO: Consider giving Alt header lines their own class
                VCFSimpleHeaderLine alt = new VCFSimpleHeaderLine (str.Substring (6), version, VCFConstants.ALT_HEADER_START.Substring (2), "ID", "Description");
                metaData.Add (alt);
            } else {
                // Generic "##key=value" line; anything without '=' is silently ignored.
                int equals = str.IndexOf ("=");
                if (equals != -1) {
                    metaData.Add (new VCFHeaderLine (str.Substring (2, equals - 2), str.Substring (equals + 1)));
                }
            }
        }
    }
    this.header = new VCFHeader (metaData, sampleNames);
    if (doOnTheFlyModifications) {
        this.header = VCFStandardHeaderLines.repairStandardHeaderLines (this.header);
    }
    return this.header;
}
/// <summary>
/// Collects the events reachable through the factory's chain for this index into an
/// insertion-ordered, duplicate-free set and returns that set's enumerator.
/// </summary>
public override IEnumerator<EventBean> GetEnumerator()
{
    var collected = new LinkedHashSet<EventBean>();
    factory.Chain.GetAll(collected, _index);
    return collected.GetEnumerator();
}
/// <summary>
/// Populates <paramref name="top"/> from a name-to-value map. For each entry, values are
/// delivered in order through: (1) catch-all setter methods annotated with
/// DataFlowOpParameter(IsAll) taking (string, object); (2) a writable property of matching
/// name (value coerced to the property type); (3) an annotated field whose attribute name
/// or field name matches. The reserved "class" property name is skipped.
/// </summary>
/// <param name="objectProperties">Property name to value map to apply.</param>
/// <param name="top">The target object being populated.</param>
/// <param name="exprNodeOrigin">Origin passed through to value coercion.</param>
/// <param name="exprValidationContext">Validation context passed through to value coercion.</param>
/// <exception cref="ExprValidationException">On invoke failures, bad catch-all signatures, or an unresolvable property name.</exception>
public static void PopulateObject(
    IDictionary<string, object> objectProperties,
    object top,
    ExprNodeOrigin exprNodeOrigin,
    ExprValidationContext exprValidationContext)
{
    var applicableClass = top.GetType();
    var writables = PropertyHelper.GetWritableProperties(applicableClass);
    var annotatedFields = TypeHelper.FindAnnotatedFields(top.GetType(), typeof(DataFlowOpParameterAttribute));
    var annotatedMethods = TypeHelper.FindAnnotatedMethods(top.GetType(), typeof(DataFlowOpParameterAttribute));

    // find catch-all methods
    ISet<MethodInfo> catchAllMethods = new LinkedHashSet<MethodInfo>();
    if (annotatedMethods != null) {
        foreach (var method in annotatedMethods) {
            var anno = (DataFlowOpParameterAttribute) TypeHelper
                .GetAnnotations<DataFlowOpParameterAttribute>(method.UnwrapAttributes())[0];
            if (anno.IsAll) {
                // A catch-all must take exactly (string name, object value).
                var parameters = method.GetParameters();
                if (parameters.Length == 2 &&
                    (parameters[0].ParameterType == typeof(string)) &&
                    (parameters[1].ParameterType == typeof(object))) {
                    catchAllMethods.Add(method);
                    continue;
                }
                throw new ExprValidationException("Invalid annotation for catch-call");
            }
        }
    }

    // map provided values
    foreach (var property in objectProperties) {
        var found = false;
        var propertyName = property.Key;

        // invoke catch-all setters; each one sees every property, uncoerced
        foreach (var method in catchAllMethods) {
            try {
                method.Invoke(top, new[] {propertyName, property.Value});
            }
            catch (MemberAccessException e) {
                throw new ExprValidationException(
                    "Illegal access invoking method for property '" + propertyName + "' for class " + applicableClass.Name + " method " + method.Name, e);
            }
            catch (TargetException e) {
                throw new ExprValidationException(
                    "Exception invoking method for property '" + propertyName + "' for class " + applicableClass.Name + " method " + method.Name + ": " + e.InnerException.Message, e);
            }
            found = true;
        }

        // The reserved class-name property never maps to a member.
        if (propertyName.ToLowerInvariant().Equals(CLASS_PROPERTY_NAME)) {
            continue;
        }

        // use the writeable property descriptor (appropriate setter method) from writing the property
        var descriptor = FindDescriptor(applicableClass, propertyName, writables);
        if (descriptor != null) {
            var coerceProperty = CoerceProperty(
                propertyName, applicableClass, property.Value, descriptor.PropertyType, exprNodeOrigin, exprValidationContext, false, true);
            try {
                // The write member may be either a setter method or a property.
                var writeMember = descriptor.WriteMember;
                if (writeMember is MethodInfo writeMethod) {
                    writeMethod.Invoke(top, new[] {coerceProperty});
                }
                else if (writeMember is PropertyInfo writeProperty) {
                    writeProperty.SetValue(top, coerceProperty);
                }
                else {
                    throw new IllegalStateException("writeMember of invalid type");
                }
            }
            catch (ArgumentException e) {
                throw new ExprValidationException(
                    "Illegal argument invoking setter method for property '" + propertyName + "' for class " + applicableClass.Name + " method " + descriptor.WriteMember.Name + " provided value " + coerceProperty, e);
            }
            catch (MemberAccessException e) {
                throw new ExprValidationException(
                    "Illegal access invoking setter method for property '" + propertyName + "' for class " + applicableClass.Name + " method " + descriptor.WriteMember.Name, e);
            }
            catch (TargetException e) {
                throw new ExprValidationException(
                    "Exception invoking setter method for property '" + propertyName + "' for class " + applicableClass.Name + " method " + descriptor.WriteMember.Name + ": " + e.InnerException.Message, e);
            }
            continue;
        }

        // find the field annotated with {@link @GraphOpProperty}
        foreach (var annotatedField in annotatedFields) {
            var anno = (DataFlowOpParameterAttribute) TypeHelper.GetAnnotations(
                typeof(DataFlowOpParameterAttribute), annotatedField.UnwrapAttributes())[0];
            // Match either the attribute-declared name or the field's own name.
            if (anno.Name.Equals(propertyName) || annotatedField.Name.Equals(propertyName)) {
                var coerceProperty = CoerceProperty(
                    propertyName, applicableClass, property.Value, annotatedField.FieldType, exprNodeOrigin, exprValidationContext, true, true);
                try {
                    annotatedField.SetValue(top, coerceProperty);
                }
                catch (Exception e) {
                    throw new ExprValidationException(
                        "Failed to set field '" + annotatedField.Name + "': " + e.Message, e);
                }
                found = true;
                break;
            }
        }

        if (found) {
            continue;
        }

        throw new ExprValidationException(
            "Failed to find writable property '" + propertyName + "' for class " + applicableClass.Name);
    }
}
/// <summary>
/// Compiles this pattern stream spec: validates option compatibility, assigns ordered tag
/// numbers to filter nodes that name events, recursively compiles the eval-factory tree,
/// and wraps the tree with audit nodes when pattern auditing is enabled.
/// </summary>
/// <param name="context">Statement context supplying annotations and services.</param>
/// <param name="eventTypeReferences">Collects referenced event type names.</param>
/// <param name="isInsertInto">Whether compiled within an insert-into.</param>
/// <param name="assignedTypeNumberStack">Previously assigned type numbers, seeds the subexpression stack.</param>
/// <param name="tags">Tag collection; a fresh MatchEventSpec is created when null.</param>
/// <param name="priorAllTags">Tag names already assigned by an outer scope, kept first in order.</param>
/// <param name="isJoin">True when compiled as part of a join (restricts options).</param>
/// <param name="isContextDeclaration">True when inside a context declaration (restricts options).</param>
/// <param name="isOnTrigger">True when inside an on-trigger (restricts options).</param>
/// <returns>The compiled pattern stream spec.</returns>
/// <exception cref="ExprValidationException">When discard-partials/suppress-matches are used in an unsupported construct.</exception>
private PatternStreamSpecCompiled CompileInternal(
    StatementContext context,
    ICollection<string> eventTypeReferences,
    bool isInsertInto,
    ICollection<int> assignedTypeNumberStack,
    MatchEventSpec tags,
    ICollection<string> priorAllTags,
    bool isJoin,
    bool isContextDeclaration,
    bool isOnTrigger)
{
    // validate
    if ((_suppressSameEventMatches || _discardPartialsOnMatch) && (isJoin || isContextDeclaration || isOnTrigger)) {
        throw new ExprValidationException(
            "Discard-partials and suppress-matches is not supported in a joins, context declaration and on-action");
    }
    if (tags == null) {
        tags = new MatchEventSpec();
    }
    var subexpressionIdStack = new ArrayDeque<int>(assignedTypeNumberStack);
    var evaluatorContextStmt = new ExprEvaluatorContextStatement(context, false);
    var nodeStack = new Stack<EvalFactoryNode>();

    // determine ordered tags
    var filterFactoryNodes = EvalNodeUtil.RecursiveGetChildNodes(_evalFactoryNode, FilterForFilterFactoryNodes.INSTANCE);
    // Insertion-ordered so tag numbers are stable; prior tags keep the lowest numbers.
    var allTagNamesOrdered = new LinkedHashSet<string>();
    if (priorAllTags != null) {
        allTagNamesOrdered.AddAll(priorAllTags);
    }
    foreach (var filterNode in filterFactoryNodes) {
        var factory = (EvalFilterFactoryNode)filterNode;
        int tagNumber;
        if (factory.EventAsName != null) {
            if (!allTagNamesOrdered.Contains(factory.EventAsName)) {
                // New tag: its number is its position in the ordered set.
                allTagNamesOrdered.Add(factory.EventAsName);
                tagNumber = allTagNamesOrdered.Count - 1;
            }
            else {
                // Existing tag: reuse the previously assigned position.
                tagNumber = FindTagNumber(factory.EventAsName, allTagNamesOrdered);
            }
            factory.EventAsTagNumber = tagNumber;
        }
    }
    RecursiveCompile(_evalFactoryNode, context, evaluatorContextStmt, eventTypeReferences, isInsertInto, tags, subexpressionIdStack, nodeStack, allTagNamesOrdered);

    // Wrap the compiled tree with audit nodes when @Audit pattern auditing is requested.
    var auditPattern = AuditEnum.PATTERN.GetAudit(context.Annotations);
    var auditPatternInstance = AuditEnum.PATTERNINSTANCES.GetAudit(context.Annotations);
    var compiledEvalFactoryNode = _evalFactoryNode;
    if (auditPattern != null || auditPatternInstance != null) {
        var instanceCount = new EvalAuditInstanceCount();
        compiledEvalFactoryNode = RecursiveAddAuditNode(null, auditPattern != null, auditPatternInstance != null, _evalFactoryNode, instanceCount);
    }
    return(new PatternStreamSpecCompiled(compiledEvalFactoryNode, tags.TaggedEventTypes, tags.ArrayEventTypes, allTagNamesOrdered, ViewSpecs, OptionalStreamName, Options, _suppressSameEventMatches, _discardPartialsOnMatch));
}
/// <summary>
/// Analyzes the group-by clause. Deduplicates group-by expressions; when rollup/cube or
/// grouping sets are present, expands them into distinct rollup levels (index sets over
/// the distinct expressions) and produces per-level copies of the select, having and
/// order-by expressions for later per-level rewriting.
/// </summary>
/// <param name="groupByElements">Raw group-by clause elements; null/empty yields null.</param>
/// <param name="selectClauseSpec">Select clause, copied once per rollup level.</param>
/// <param name="optionalHavingNode">Optional having expression, copied per level.</param>
/// <param name="orderByList">Optional order-by items, copied per level.</param>
/// <param name="visitor">Visitor applied while copying expressions.</param>
/// <returns>The analyzed group-by expressions, or null when there is no group-by clause.</returns>
/// <exception cref="ExprValidationException">On duplicate groupings, a lone overall '()' grouping, or wildcard select with rollup.</exception>
public static GroupByClauseExpressions GetGroupByRollupExpressions(
    IList<GroupByClauseElement> groupByElements,
    SelectClauseSpecRaw selectClauseSpec,
    ExprNode optionalHavingNode,
    IList<OrderByItem> orderByList,
    ExprNodeSubselectDeclaredDotVisitor visitor)
{
    if (groupByElements == null || groupByElements.Count == 0) {
        return(null);
    }

    // walk group-by-elements, determine group-by expressions and rollup nodes
    var groupByExpressionInfo = GroupByToRollupNodes(groupByElements);

    // obtain expression nodes, collect unique nodes and assign index
    var distinctGroupByExpressions = new List<ExprNode>();
    var expressionToIndex = new Dictionary<ExprNode, int>();
    foreach (ExprNode exprNode in groupByExpressionInfo.Expressions) {
        var found = false;
        for (var i = 0; i < distinctGroupByExpressions.Count; i++) {
            ExprNode other = distinctGroupByExpressions[i];
            // find same expression
            if (ExprNodeUtility.DeepEquals(exprNode, other)) {
                expressionToIndex.Put(exprNode, i);
                found = true;
                break;
            }
        }
        // not seen before
        if (!found) {
            expressionToIndex.Put(exprNode, distinctGroupByExpressions.Count);
            distinctGroupByExpressions.Add(exprNode);
        }
    }

    // determine rollup, validate it is either (not both)
    var hasGroupingSet = false;
    var hasRollup = false;
    foreach (var element in groupByElements) {
        if (element is GroupByClauseElementGroupingSet) {
            hasGroupingSet = true;
        }
        if (element is GroupByClauseElementRollupOrCube) {
            hasRollup = true;
        }
    }

    // no-rollup or grouping-sets means simply validate
    var groupByExpressions = distinctGroupByExpressions.ToArray();
    if (!hasRollup && !hasGroupingSet) {
        return(new GroupByClauseExpressions(groupByExpressions));
    }

    // evaluate rollup node roots
    IList<GroupByRollupNodeBase> nodes = groupByExpressionInfo.Nodes;
    var perNodeCombinations = new Object[nodes.Count][];
    var context = new GroupByRollupEvalContext(expressionToIndex);
    try {
        for (var i = 0; i < nodes.Count; i++) {
            var node = nodes[i];
            var combinations = node.Evaluate(context);
            perNodeCombinations[i] = new Object[combinations.Count];
            for (var j = 0; j < combinations.Count; j++) {
                perNodeCombinations[i][j] = combinations[j];
            }
        }
    }
    catch (GroupByRollupDuplicateException ex) {
        // An empty index list marks the duplicated overall grouping '()'.
        if (ex.Indexes.Length == 0) {
            throw new ExprValidationException("Failed to validate the group-by clause, found duplicate specification of the overall grouping '()'");
        }
        else {
            var writer = new StringWriter();
            var delimiter = "";
            for (var i = 0; i < ex.Indexes.Length; i++) {
                writer.Write(delimiter);
                writer.Write(groupByExpressions[ex.Indexes[i]].ToExpressionStringMinPrecedenceSafe());
                delimiter = ", ";
            }
            throw new ExprValidationException("Failed to validate the group-by clause, found duplicate specification of expressions (" + writer.ToString() + ")");
        }
    }

    // enumerate combinations building an index list
    var combinationEnumeration = new CombinationEnumeration(perNodeCombinations);
    // Sorted within a combination; LinkedHashSet deduplicates levels keeping first-seen order.
    ICollection<int> combination = new SortedSet<int>();
    ICollection<MultiKeyInt> indexList = new LinkedHashSet<MultiKeyInt>();
    while (combinationEnumeration.MoveNext()) {
        combination.Clear();
        Object[] combinationOA = combinationEnumeration.Current;
        foreach (var indexes in combinationOA) {
            var indexarr = (int[])indexes;
            foreach (var anIndex in indexarr) {
                combination.Add(anIndex);
            }
        }
        var indexArr = CollectionUtil.IntArray(combination);
        indexList.Add(new MultiKeyInt(indexArr));
    }

    // obtain rollup levels
    var rollupLevels = new int[indexList.Count][];
    var count = 0;
    foreach (var mk in indexList) {
        rollupLevels[count++] = mk.Keys;
    }
    var numberOfLevels = rollupLevels.Length;
    if (numberOfLevels == 1 && rollupLevels[0].Length == 0) {
        throw new ExprValidationException("Failed to validate the group-by clause, the overall grouping '()' cannot be the only grouping");
    }

    // obtain select-expression copies for rewrite
    var expressions = selectClauseSpec.SelectExprList;
    var selects = new ExprNode[numberOfLevels][];
    for (var i = 0; i < numberOfLevels; i++) {
        selects[i] = new ExprNode[expressions.Count];
        for (var j = 0; j < expressions.Count; j++) {
            SelectClauseElementRaw selectRaw = expressions[j];
            if (!(selectRaw is SelectClauseExprRawSpec)) {
                throw new ExprValidationException("Group-by with rollup requires that the select-clause does not use wildcard");
            }
            var compiled = (SelectClauseExprRawSpec)selectRaw;
            selects[i][j] = CopyVisitExpression(compiled.SelectExpression, visitor);
        }
    }

    // obtain having-expression copies for rewrite
    ExprNode[] optHavingNodeCopy = null;
    if (optionalHavingNode != null) {
        optHavingNodeCopy = new ExprNode[numberOfLevels];
        for (var i = 0; i < numberOfLevels; i++) {
            optHavingNodeCopy[i] = CopyVisitExpression(optionalHavingNode, visitor);
        }
    }

    // obtain orderby-expression copies for rewrite
    ExprNode[][] optOrderByCopy = null;
    if (orderByList != null && orderByList.Count > 0) {
        optOrderByCopy = new ExprNode[numberOfLevels][];
        for (var i = 0; i < numberOfLevels; i++) {
            optOrderByCopy[i] = new ExprNode[orderByList.Count];
            for (var j = 0; j < orderByList.Count; j++) {
                OrderByItem element = orderByList[j];
                optOrderByCopy[i][j] = CopyVisitExpression(element.ExprNode, visitor);
            }
        }
    }

    return(new GroupByClauseExpressions(groupByExpressions, rollupLevels, selects, optHavingNodeCopy, optOrderByCopy));
}
/// <summary>
/// Synchronizes the local collection with the user's AniDB MyList: queues additions for
/// local files missing online, queues deletions for online entries with no local file,
/// and reconciles watched-state differences per AniDB user according to server settings.
/// Skipped when it already ran within the configured frequency window unless ForceRefresh is set.
/// </summary>
public override void ProcessCommand()
{
    logger.Info("Processing CommandRequest_SyncMyList");
    try
    {
        // we will always assume that an anime was downloaded via http first
        ScheduledUpdate sched = RepoFactory.ScheduledUpdate.GetByUpdateType((int)ScheduledUpdateType.AniDBMyListSync);
        if (sched == null)
        {
            // First-ever run: create the schedule record (saved at the end on success).
            sched = new ScheduledUpdate
            {
                UpdateType = (int)ScheduledUpdateType.AniDBMyListSync,
                UpdateDetails = string.Empty
            };
        }
        else
        {
            int freqHours = Utils.GetScheduledHours(ServerSettings.AniDB_MyList_UpdateFrequency);
            // if we have run this within the configured window and are not forcing it, then exit
            TimeSpan tsLastRun = DateTime.Now - sched.LastUpdate;
            if (tsLastRun.TotalHours < freqHours)
            {
                if (!ForceRefresh)
                {
                    return;
                }
            }
        }

        // Get the list from AniDB
        AniDBHTTPCommand_GetMyList cmd = new AniDBHTTPCommand_GetMyList();
        cmd.Init(ServerSettings.AniDB_Username, ServerSettings.AniDB_Password);
        enHelperActivityType ev = cmd.Process();
        if (ev != enHelperActivityType.GotMyListHTTP)
        {
            logger.Warn("AniDB did not return a successful code: " + ev);
            return;
        }

        // Counters for the summary log line at the end.
        int totalItems = 0;
        int watchedItems = 0;
        int modifiedItems = 0;
        double pct = 0; // NOTE(review): unused — presumably a leftover progress counter
        // Add missing files on AniDB
        var onlineFiles = cmd.MyListItems.ToLookup(a => a.FileID);
        var dictAniFiles = RepoFactory.AniDB_File.GetAll().ToLookup(a => a.Hash);
        int missingFiles = 0;
        foreach (SVR_VideoLocal vid in RepoFactory.VideoLocal.GetAll()
            .Where(a => !string.IsNullOrEmpty(a.Hash)).ToList())
        {
            // Does it have a linked AniFile
            if (!dictAniFiles.Contains(vid.Hash))
            {
                continue;
            }
            int fileID = dictAniFiles[vid.Hash].FirstOrDefault()?.FileID ?? 0;
            if (fileID == 0)
            {
                continue;
            }
            // Is it in MyList
            if (onlineFiles.Contains(fileID))
            {
                Raw_AniDB_MyListFile file = onlineFiles[fileID].FirstOrDefault(a => a != null);
                // Update file state if deleted
                if (file != null && file.State != (int)ServerSettings.AniDB_MyList_StorageState)
                {
                    int seconds = Commons.Utils.AniDB.GetAniDBDateAsSeconds(file.WatchedDate);
                    CommandRequest_UpdateMyListFileStatus cmdUpdateFile = new CommandRequest_UpdateMyListFileStatus(vid.Hash, file.WatchedDate.HasValue, false, seconds);
                    cmdUpdateFile.Save();
                    // NOTE(review): there is no 'continue' here, so a file that IS online but in the
                    // wrong storage state also falls through to the add-to-MyList branch below and
                    // is counted in missingFiles — confirm whether that is intended.
                }
                else if (file != null)
                {
                    continue;
                }
            }
            // means we have found a file in our local collection, which is not recorded online
            if (ServerSettings.AniDB_MyList_AddFiles)
            {
                CommandRequest_AddFileToMyList cmdAddFile = new CommandRequest_AddFileToMyList(vid.Hash);
                cmdAddFile.Save();
            }
            missingFiles++;
        }
        logger.Info($"MYLIST Missing Files: {missingFiles} Added to queue for inclusion");

        List<SVR_JMMUser> aniDBUsers = RepoFactory.JMMUser.GetAniDBUsers();
        // Series whose watched state changed; stats are queued once per series at the end.
        LinkedHashSet<SVR_AnimeSeries> modifiedSeries = new LinkedHashSet<SVR_AnimeSeries>();

        // Remove Missing Files and update watched states (single loop)
        List<int> filesToRemove = new List<int>();
        foreach (Raw_AniDB_MyListFile myitem in cmd.MyListItems)
        {
            totalItems++;
            if (myitem.IsWatched)
            {
                watchedItems++;
            }
            string hash = string.Empty;
            SVR_AniDB_File anifile = RepoFactory.AniDB_File.GetByFileID(myitem.FileID);
            if (anifile != null)
            {
                hash = anifile.Hash;
            }
            else
            {
                // look for manually linked files
                List<CrossRef_File_Episode> xrefs = RepoFactory.CrossRef_File_Episode.GetByEpisodeID(myitem.EpisodeID);
                foreach (CrossRef_File_Episode xref in xrefs)
                {
                    if (xref.CrossRefSource == (int)CrossRefSource.AniDB)
                    {
                        continue;
                    }
                    hash = xref.Hash;
                    break;
                }
            }
            // We couldn't even find a hash, so remove it
            if (string.IsNullOrEmpty(hash))
            {
                filesToRemove.Add(myitem.FileID);
                continue;
            }
            // If there's no video local, we don't have it
            SVR_VideoLocal vl = RepoFactory.VideoLocal.GetByHash(hash);
            if (vl == null)
            {
                filesToRemove.Add(myitem.FileID);
                continue;
            }
            foreach (SVR_JMMUser juser in aniDBUsers)
            {
                bool localStatus = false;
                // doesn't matter which anidb user we use
                int jmmUserID = juser.JMMUserID;
                VideoLocal_User userRecord = vl.GetUserRecord(juser.JMMUserID);
                if (userRecord != null)
                {
                    localStatus = userRecord.WatchedDate.HasValue;
                }
                string action = string.Empty;
                if (localStatus == myitem.IsWatched)
                {
                    continue;
                }
                // localStatus and AniDB Status are different
                DateTime? watchedDate = myitem.WatchedDate ?? DateTime.Now;
                if (localStatus)
                {
                    // local = watched, anidb = unwatched
                    if (ServerSettings.AniDB_MyList_ReadUnwatched)
                    {
                        modifiedItems++;
                        vl.ToggleWatchedStatus(false, false, watchedDate, false, jmmUserID, false, true);
                        action = "Used AniDB Status";
                    }
                    else if (ServerSettings.AniDB_MyList_SetWatched)
                    {
                        // userRecord is non-null here: localStatus can only be true when it exists.
                        vl.ToggleWatchedStatus(true, true, userRecord.WatchedDate, false, jmmUserID, false, true);
                    }
                }
                else
                {
                    // means local is un-watched, and anidb is watched
                    if (ServerSettings.AniDB_MyList_ReadWatched)
                    {
                        modifiedItems++;
                        vl.ToggleWatchedStatus(true, false, watchedDate, false, jmmUserID, false, true);
                        action = "Updated Local record to Watched";
                    }
                    else if (ServerSettings.AniDB_MyList_SetUnwatched)
                    {
                        vl.ToggleWatchedStatus(false, true, watchedDate, false, jmmUserID, false, true);
                    }
                }
                vl.GetAnimeEpisodes().Select(a => a.GetAnimeSeries()).Where(a => a != null).ForEach(a => modifiedSeries.Add(a));
                logger.Info($"MYLISTDIFF:: File {vl.FileName} - Local Status = {localStatus}, AniDB Status = {myitem.IsWatched} --- {action}");
            }
        }

        // Actually remove the files
        if (filesToRemove.Count > 0)
        {
            foreach (int fileID in filesToRemove)
            {
                CommandRequest_DeleteFileFromMyList deleteCommand = new CommandRequest_DeleteFileFromMyList(fileID);
                deleteCommand.Save();
            }
            logger.Info($"MYLIST Missing Files: {filesToRemove.Count} Added to queue for deletion");
        }

        modifiedSeries.ForEach(a => a.QueueUpdateStats());

        logger.Info($"Process MyList: {totalItems} Items, {missingFiles} Added, {filesToRemove.Count} Deleted, {watchedItems} Watched, {modifiedItems} Modified");

        // Only record a successful run; failures fall into the catch and leave LastUpdate untouched.
        sched.LastUpdate = DateTime.Now;
        RepoFactory.ScheduledUpdate.Save(sched);
    }
    catch (Exception ex)
    {
        logger.Error(ex, "Error processing CommandRequest_SyncMyList: {0} ", ex.Message);
    }
}
/// <summary>
/// Initializes the node with the lock that guards it, an empty insertion-ordered
/// callback set, and no parameter indexes.
/// </summary>
/// <param name="readWriteLock">reader-writer lock protecting this node</param>
public FilterHandleSetNode(IReaderWriterLock readWriteLock)
{
    _callbackSet = new LinkedHashSet<FilterHandle>();
    _indizes = EmptyList<FilterParamIndexBase>.Instance;
    _nodeRwLock = readWriteLock;
}
/// <summary>
/// Creates a bundle with the given attributes and an empty, insertion-ordered
/// asset set that de-duplicates via the asset-specific equality comparer.
/// </summary>
/// <param name="attributes">bundle attributes, stored as-is (not copied)</param>
internal GroupBundle(Dictionary<string, string> attributes)
{
    Attributes = attributes;
    Assets = new LinkedHashSet<Asset>(new Asset.EqualityComparer());
}
/// <summary>
/// Returns the live set of objects at the given location, lazily creating and
/// registering an empty insertion-ordered set the first time a location is queried.
/// Never returns null.
/// </summary>
public HashSet<EnvironmentObject> getObjectsAt(XYLocation loc)
{
    HashSet<EnvironmentObject> found = objsAtLocation.get(loc);
    if (found != null)
    {
        return found;
    }
    // Always hand back (and cache) a live empty set rather than null so callers
    // can mutate it and have the change reflected in the map.
    HashSet<EnvironmentObject> created = new LinkedHashSet<EnvironmentObject>();
    objsAtLocation.put(loc, created);
    return created;
}
public void ShouldBeAbleToDeserializeBinarySerialized()
{
    // NOTE(review): BinaryFormatter is obsolete and insecure (removed in .NET 9);
    // tolerated here only because binary serializability itself is the subject under test.
    var original = new LinkedHashSet<int> { 1, 10, 5 };
    var formatter = new BinaryFormatter();

    using (var buffer = new MemoryStream())
    {
        formatter.Serialize(buffer, original);
        buffer.Position = 0;

        var roundTripped = (LinkedHashSet<int>) formatter.Deserialize(buffer);

        Assert.That(original, Is.EquivalentTo(roundTripped));
    }
}
/// <summary>
/// Ctor. Validates a complete MATCH_RECOGNIZE specification (pattern, DEFINE,
/// MEASURE, PARTITION BY, interval) and assembles the state this factory needs:
/// variable-to-stream assignments, aggregation services, and output event types.
/// </summary>
/// <param name="viewChain">views</param>
/// <param name="matchRecognizeSpec">specification</param>
/// <param name="agentInstanceContext">The agent instance context.</param>
/// <param name="isUnbound">true for unbound stream</param>
/// <param name="annotations">annotations</param>
/// <param name="matchRecognizeConfig">match-recognize engine default settings</param>
/// <exception cref="ExprValidationException">
/// Variable ' + defineItem.Identifier + ' has already been defined
/// or
/// An aggregate function may not appear in a DEFINE clause
/// or
/// Failed to validate condition expression for variable ' + defineItem.Identifier + ': + ex.Message
/// or
/// Aggregation functions in the measure-clause must only refer to properties of exactly one group variable returning multiple events
/// or
/// Aggregation functions in the measure-clause must refer to one or more properties of exactly one group variable returning multiple events
/// or
/// The measures clause requires that each expression utilizes the AS keyword to assign a column name
/// </exception>
/// <throws>ExprValidationException if validation fails</throws>
public EventRowRegexNFAViewFactory(
    ViewFactoryChain viewChain,
    MatchRecognizeSpec matchRecognizeSpec,
    AgentInstanceContext agentInstanceContext,
    bool isUnbound,
    Attribute[] annotations,
    ConfigurationEngineDefaults.MatchRecognize matchRecognizeConfig)
{
    var parentViewType = viewChain.EventType;
    _matchRecognizeSpec = matchRecognizeSpec;
    _isUnbound = isUnbound;
    _isIterateOnly = HintEnum.ITERATE_ONLY.GetHint(annotations) != null;
    _matchRecognizeConfig = matchRecognizeConfig;
    var statementContext = agentInstanceContext.StatementContext;

    // Expand repeats and permutations
    _expandedPatternNode = RegexPatternExpandUtil.Expand(matchRecognizeSpec.Pattern);

    // Determine single-row and multiple-row variables
    _variablesSingle = new LinkedHashSet<string>();
    ISet<string> variablesMultiple = new LinkedHashSet<string>();
    EventRowRegexHelper.RecursiveInspectVariables(_expandedPatternNode, false, _variablesSingle, variablesMultiple);

    // each variable gets associated with a stream number (multiple-row variables as well to hold the current event for the expression).
    // Single-row variables are numbered first, then multiple-row variables.
    var streamNum = 0;
    _variableStreams = new LinkedHashMap<string, Pair<int, bool>>();
    foreach (var variableSingle in _variablesSingle)
    {
        _variableStreams.Put(variableSingle, new Pair<int, bool>(streamNum, false));
        streamNum++;
    }
    foreach (var variableMultiple in variablesMultiple)
    {
        _variableStreams.Put(variableMultiple, new Pair<int, bool>(streamNum, true));
        streamNum++;
    }

    // mapping of stream to variable
    _streamVariables = new SortedDictionary<int, string>();
    foreach (var entry in _variableStreams)
    {
        _streamVariables.Put(entry.Value.First, entry.Key);
    }

    // determine visibility rules
    var visibility = EventRowRegexHelper.DetermineVisibility(_expandedPatternNode);

    // assemble all single-row variables for expression validation
    var allStreamNames = new string[_variableStreams.Count];
    var allTypes = new EventType[_variableStreams.Count];
    streamNum = 0;
    foreach (var variableSingle in _variablesSingle)
    {
        allStreamNames[streamNum] = variableSingle;
        allTypes[streamNum] = parentViewType;
        streamNum++;
    }
    foreach (var variableMultiple in variablesMultiple)
    {
        allStreamNames[streamNum] = variableMultiple;
        allTypes[streamNum] = parentViewType;
        streamNum++;
    }

    // determine type service for use with DEFINE
    // validate each DEFINE clause expression
    ISet<string> definedVariables = new HashSet<string>();
    IList<ExprAggregateNode> aggregateNodes = new List<ExprAggregateNode>();
    var exprEvaluatorContext = new ExprEvaluatorContextStatement(statementContext, false);
    _isExprRequiresMultimatchState = new bool[_variableStreams.Count];
    for (var defineIndex = 0; defineIndex < matchRecognizeSpec.Defines.Count; defineIndex++)
    {
        var defineItem = matchRecognizeSpec.Defines[defineIndex];
        if (definedVariables.Contains(defineItem.Identifier))
        {
            throw new ExprValidationException("Variable '" + defineItem.Identifier + "' has already been defined");
        }
        definedVariables.Add(defineItem.Identifier);

        // stream-type visibilities handled here
        var typeServiceDefines = EventRowRegexNFAViewFactoryHelper.BuildDefineStreamTypeServiceDefine(statementContext, _variableStreams, defineItem, visibility, parentViewType);
        var exprNodeResult = HandlePreviousFunctions(defineItem.Expression);
        var validationContext = new ExprValidationContext(
            typeServiceDefines, statementContext.MethodResolutionService, null,
            statementContext.SchedulingService, statementContext.VariableService,
            statementContext.TableService, exprEvaluatorContext,
            statementContext.EventAdapterService, statementContext.StatementName,
            statementContext.StatementId, statementContext.Annotations,
            statementContext.ContextDescriptor, statementContext.ScriptingService,
            true, false, true, false, null, false);
        ExprNode validated;
        try
        {
            // validate
            validated = ExprNodeUtility.GetValidatedSubtree(ExprNodeOrigin.MATCHRECOGDEFINE, exprNodeResult, validationContext);
            // check aggregates
            defineItem.Expression = validated;
            ExprAggregateNodeUtil.GetAggregatesBottomUp(validated, aggregateNodes);
            if (!aggregateNodes.IsEmpty())
            {
                throw new ExprValidationException("An aggregate function may not appear in a DEFINE clause");
            }
        }
        catch (ExprValidationException ex)
        {
            throw new ExprValidationException("Failed to validate condition expression for variable '" + defineItem.Identifier + "': " + ex.Message, ex);
        }

        // determine access to event properties from multi-matches
        var visitor = new ExprNodeStreamRequiredVisitor();
        validated.Accept(visitor);
        var streamsRequired = visitor.StreamsRequired;
        foreach (var streamRequired in streamsRequired)
        {
            if (streamRequired >= _variableStreams.Count)
            {
                var streamNumIdent = _variableStreams.Get(defineItem.Identifier).First;
                _isExprRequiresMultimatchState[streamNumIdent] = true;
                break;
            }
        }
    }
    _isDefineAsksMultimatches = CollectionUtil.IsAnySet(_isExprRequiresMultimatchState);
    _defineMultimatchEventBean = _isDefineAsksMultimatches ? EventRowRegexNFAViewFactoryHelper.GetDefineMultimatchBean(statementContext, _variableStreams, parentViewType) : null;

    // assign "prev" node indexes
    // Since an expression such as "prior(2, price), prior(8, price)" translates into {2, 8} the relative index is {0, 1}.
    // Map the expression-supplied index to a relative index
    // NOTE(review): _callbacksPerIndex is presumably populated by HandlePreviousFunctions above — confirm.
    var countPrev = 0;
    foreach (var entry in _callbacksPerIndex)
    {
        foreach (var callback in entry.Value)
        {
            callback.AssignedIndex = countPrev;
        }
        countPrev++;
    }

    // determine type service for use with MEASURE
    IDictionary<string, object> measureTypeDef = new LinkedHashMap<string, object>();
    foreach (var variableSingle in _variablesSingle)
    {
        measureTypeDef.Put(variableSingle, parentViewType);
    }
    foreach (var variableMultiple in variablesMultiple)
    {
        // multiple-row variables surface as arrays of the parent event type
        measureTypeDef.Put(variableMultiple, new EventType[] { parentViewType });
    }
    var outputEventTypeName = statementContext.StatementId + "_rowrecog";
    _compositeEventType = (ObjectArrayEventType)statementContext.EventAdapterService.CreateAnonymousObjectArrayType(outputEventTypeName, measureTypeDef);
    StreamTypeService typeServiceMeasure = new StreamTypeServiceImpl(_compositeEventType, "MATCH_RECOGNIZE", true, statementContext.EngineURI);

    // find MEASURE clause aggregations
    var measureReferencesMultivar = false;
    IList<ExprAggregateNode> measureAggregateExprNodes = new List<ExprAggregateNode>();
    foreach (var measureItem in matchRecognizeSpec.Measures)
    {
        ExprAggregateNodeUtil.GetAggregatesBottomUp(measureItem.Expr, measureAggregateExprNodes);
    }
    if (!measureAggregateExprNodes.IsEmpty())
    {
        var isIStreamOnly = new bool[allStreamNames.Length];
        CompatExtensions.Fill(isIStreamOnly, true);
        var typeServiceAggregateMeasure = new StreamTypeServiceImpl(allTypes, allStreamNames, isIStreamOnly, statementContext.EngineURI, false);
        var measureExprAggNodesPerStream = new Dictionary<int, IList<ExprAggregateNode>>();
        foreach (var aggregateNode in measureAggregateExprNodes)
        {
            // validate absence of group-by
            aggregateNode.ValidatePositionals();
            if (aggregateNode.OptionalLocalGroupBy != null)
            {
                throw new ExprValidationException("Match-recognize does not allow aggregation functions to specify a group-by");
            }

            // validate node and params
            var count = 0;
            var visitor = new ExprNodeIdentifierVisitor(true);
            var validationContext = new ExprValidationContext(
                typeServiceAggregateMeasure, statementContext.MethodResolutionService, null,
                statementContext.SchedulingService, statementContext.VariableService,
                statementContext.TableService, exprEvaluatorContext,
                statementContext.EventAdapterService, statementContext.StatementName,
                statementContext.StatementId, statementContext.Annotations,
                statementContext.ContextDescriptor, statementContext.ScriptingService,
                false, false, true, false, null, false);
            foreach (var child in aggregateNode.ChildNodes)
            {
                var validated = ExprNodeUtility.GetValidatedSubtree(ExprNodeOrigin.MATCHRECOGMEASURE, child, validationContext);
                validated.Accept(visitor);
                aggregateNode.SetChildNode(count++, new ExprNodeValidated(validated));
            }
            validationContext = new ExprValidationContext(
                typeServiceMeasure, statementContext.MethodResolutionService, null,
                statementContext.SchedulingService, statementContext.VariableService,
                statementContext.TableService, exprEvaluatorContext,
                statementContext.EventAdapterService, statementContext.StatementName,
                statementContext.StatementId, statementContext.Annotations,
                statementContext.ContextDescriptor, statementContext.ScriptingService,
                false, false, true, false, null, false);
            aggregateNode.Validate(validationContext);

            // verify properties used within the aggregation
            var aggregatedStreams = new HashSet<int>();
            foreach (var pair in visitor.ExprProperties)
            {
                aggregatedStreams.Add(pair.First);
            }
            int? multipleVarStream = null;
            foreach (int streamNumAggregated in aggregatedStreams)
            {
                var variable = _streamVariables.Get(streamNumAggregated);
                if (variablesMultiple.Contains(variable))
                {
                    measureReferencesMultivar = true;
                    if (multipleVarStream == null)
                    {
                        multipleVarStream = streamNumAggregated;
                        continue;
                    }
                    throw new ExprValidationException("Aggregation functions in the measure-clause must only refer to properties of exactly one group variable returning multiple events");
                }
            }
            if (multipleVarStream == null)
            {
                throw new ExprValidationException("Aggregation functions in the measure-clause must refer to one or more properties of exactly one group variable returning multiple events");
            }
            var aggNodesForStream = measureExprAggNodesPerStream.Get(multipleVarStream.Value);
            if (aggNodesForStream == null)
            {
                aggNodesForStream = new List<ExprAggregateNode>();
                measureExprAggNodesPerStream.Put(multipleVarStream.Value, aggNodesForStream);
            }
            aggNodesForStream.Add(aggregateNode);
        }
        var factoryDesc = AggregationServiceFactoryFactory.GetServiceMatchRecognize(_streamVariables.Count, measureExprAggNodesPerStream, typeServiceAggregateMeasure.EventTypes);
        _aggregationService = factoryDesc.AggregationServiceFactory.MakeService(agentInstanceContext);
        _aggregationExpressions = factoryDesc.Expressions;
    }
    else
    {
        _aggregationService = null;
        _aggregationExpressions = Collections.GetEmptyList<AggregationServiceAggExpressionDesc>();
    }

    // validate each MEASURE clause expression
    IDictionary<string, object> rowTypeDef = new LinkedHashMap<string, object>();
    var streamRefVisitor = new ExprNodeStreamUseCollectVisitor();
    foreach (var measureItem in matchRecognizeSpec.Measures)
    {
        if (measureItem.Name == null)
        {
            throw new ExprValidationException("The measures clause requires that each expression utilizes the AS keyword to assign a column name");
        }
        var validated = ValidateMeasureClause(measureItem.Expr, typeServiceMeasure, variablesMultiple, _variablesSingle, statementContext);
        measureItem.Expr = validated;
        rowTypeDef.Put(measureItem.Name, validated.ExprEvaluator.ReturnType);
        validated.Accept(streamRefVisitor);
    }

    // Determine if any of the multi-var streams are referenced in the measures (non-aggregated only)
    foreach (var @ref in streamRefVisitor.Referenced)
    {
        var rootPropName = @ref.RootPropertyNameIfAny;
        if (rootPropName != null)
        {
            if (variablesMultiple.Contains(rootPropName))
            {
                measureReferencesMultivar = true;
                break;
            }
        }
        var streamRequired = @ref.StreamReferencedIfAny;
        if (streamRequired != null)
        {
            var streamVariable = _streamVariables.Get(streamRequired.Value);
            if (streamVariable != null)
            {
                var def = _variableStreams.Get(streamVariable);
                if (def != null && def.Second)
                {
                    measureReferencesMultivar = true;
                    break;
                }
            }
        }
    }
    _isCollectMultimatches = measureReferencesMultivar || _isDefineAsksMultimatches;

    // create rowevent type
    var rowEventTypeName = statementContext.StatementId + "_rowrecogrow";
    _rowEventType = statementContext.EventAdapterService.CreateAnonymousMapType(rowEventTypeName, rowTypeDef, true);

    // validate partition-by expressions, if any
    if (!matchRecognizeSpec.PartitionByExpressions.IsEmpty())
    {
        var typeServicePartition = new StreamTypeServiceImpl(parentViewType, "MATCH_RECOGNIZE_PARTITION", true, statementContext.EngineURI);
        var validated = new List<ExprNode>();
        var validationContext = new ExprValidationContext(
            typeServicePartition, statementContext.MethodResolutionService, null,
            statementContext.SchedulingService, statementContext.VariableService,
            statementContext.TableService, exprEvaluatorContext,
            statementContext.EventAdapterService, statementContext.StatementName,
            statementContext.StatementId, statementContext.Annotations,
            statementContext.ContextDescriptor, statementContext.ScriptingService,
            false, false, true, false, null, false);
        foreach (var partitionExpr in matchRecognizeSpec.PartitionByExpressions)
        {
            validated.Add(ExprNodeUtility.GetValidatedSubtree(ExprNodeOrigin.MATCHRECOGPARTITION, partitionExpr, validationContext));
        }
        matchRecognizeSpec.PartitionByExpressions = validated;
    }

    // validate interval if present
    if (matchRecognizeSpec.Interval != null)
    {
        var validationContext = new ExprValidationContext(
            new StreamTypeServiceImpl(statementContext.EngineURI, false),
            statementContext.MethodResolutionService, null,
            statementContext.SchedulingService, statementContext.VariableService,
            statementContext.TableService, exprEvaluatorContext,
            statementContext.EventAdapterService, statementContext.StatementName,
            statementContext.StatementId, statementContext.Annotations,
            statementContext.ContextDescriptor, statementContext.ScriptingService,
            false, false, true, false, null, false);
        matchRecognizeSpec.Interval.Validate(validationContext);
    }
}
/// <summary>
/// Initializes the configuration with a defensive, insertion-ordered copy of the
/// supplied super-type names.
/// </summary>
/// <param name="superTypes">names of the super types this event type derives from</param>
protected ConfigurationEventTypeWithSupertype(ICollection<String> superTypes)
{
    SuperTypes = new LinkedHashSet<string>(superTypes);
}
/// <summary>
/// Populates a data-flow operator instance from the supplied property map and,
/// in a second pass, from URI-addressed parameter values. Resolution order per
/// property: catch-all annotated methods, then writable property setters, then
/// fields annotated with DataFlowOpParameter; unresolved names raise a validation error.
/// </summary>
/// <param name="operatorName">operator name, used as the URI path prefix</param>
/// <param name="operatorNum">operator number (NOTE(review): unused in this method)</param>
/// <param name="dataFlowName">data flow name, for logging only</param>
/// <param name="objectProperties">property name to raw value map to apply</param>
/// <param name="top">the operator instance being populated</param>
/// <param name="exprNodeOrigin">origin for expression validation</param>
/// <param name="exprValidationContext">context for coercing property values</param>
/// <param name="optionalParameterProvider">optional provider (NOTE(review): unused in this method)</param>
/// <param name="optionalParameterURIs">optional 'operator_name/property_name' URI to value map</param>
public static void PopulateObject(
    string operatorName,
    int operatorNum,
    string dataFlowName,
    IDictionary<string, object> objectProperties,
    object top,
    ExprNodeOrigin exprNodeOrigin,
    ExprValidationContext exprValidationContext,
    EPDataFlowOperatorParameterProvider optionalParameterProvider,
    IDictionary<string, object> optionalParameterURIs)
{
    var applicableClass = top.GetType();
    var writables = PropertyHelper.GetWritableProperties(applicableClass);
    var annotatedFields = TypeHelper.FindAnnotatedFields(top.GetType(), typeof(DataFlowOpParameterAttribute));
    var annotatedMethods = TypeHelper.FindAnnotatedMethods(top.GetType(), typeof(DataFlowOpParameterAttribute));

    // find catch-all methods: annotated methods marked IsAll taking (string, ExprNode)
    ISet<MethodInfo> catchAllMethods = new LinkedHashSet<MethodInfo>();
    if (annotatedMethods != null)
    {
        foreach (var method in annotatedMethods)
        {
            var anno = (DataFlowOpParameterAttribute) TypeHelper.GetAnnotations(
                typeof(DataFlowOpParameterAttribute), method.UnwrapAttributes())[0];
            if (anno.IsAll)
            {
                var parameterTypes = method.GetParameterTypes();
                if (parameterTypes.Length == 2 &&
                    parameterTypes[0] == typeof(string) &&
                    parameterTypes[1] == typeof(ExprNode))
                {
                    catchAllMethods.Add(method);
                    continue;
                }
                throw new ExprValidationException("Invalid annotation for catch-call");
            }
        }
    }

    // map provided values
    foreach (var property in objectProperties)
    {
        var found = false;
        var propertyName = property.Key;

        // invoke catch-all setters
        foreach (var method in catchAllMethods)
        {
            try
            {
                method.Invoke(top, new[] {propertyName, property.Value});
            }
            catch (MemberAccessException e)
            {
                throw new ExprValidationException(
                    "Illegal access invoking method for property '" + propertyName + "' for class " +
                    applicableClass.Name + " method " + method.Name, e);
            }
            catch (TargetException e)
            {
                throw new ExprValidationException(
                    "Exception invoking method for property '" + propertyName + "' for class " +
                    applicableClass.Name + " method " + method.Name + ": " + e.InnerException.Message, e);
            }
            found = true;
        }
        // the reserved class-name key needs no further resolution
        if (propertyName.ToLowerInvariant().Equals(CLASS_PROPERTY_NAME))
        {
            continue;
        }

        // use the writeable property descriptor (appropriate setter method) from writing the property
        var descriptor = FindDescriptor(applicableClass, propertyName, writables);
        if (descriptor != null)
        {
            var coerceProperty = CoerceProperty(
                propertyName, applicableClass, property.Value, descriptor.PropertyType,
                exprNodeOrigin, exprValidationContext, false, true);
            try
            {
                var writeMember = descriptor.WriteMember;
                if (writeMember is MethodInfo writeMethod)
                {
                    writeMethod.Invoke(top, new[] {coerceProperty});
                }
                else if (writeMember is PropertyInfo writeProperty)
                {
                    writeProperty.SetValue(top, coerceProperty);
                }
                else
                {
                    throw new IllegalStateException("writeMember of invalid type");
                }
            }
            catch (ArgumentException e)
            {
                throw new ExprValidationException(
                    "Illegal argument invoking setter method for property '" + propertyName + "' for class " +
                    applicableClass.Name + " method " + descriptor.WriteMember.Name +
                    " provided value " + coerceProperty, e);
            }
            catch (MemberAccessException e)
            {
                throw new ExprValidationException(
                    "Illegal access invoking setter method for property '" + propertyName + "' for class " +
                    applicableClass.Name + " method " + descriptor.WriteMember.Name, e);
            }
            catch (TargetException e)
            {
                throw new ExprValidationException(
                    "Exception invoking setter method for property '" + propertyName + "' for class " +
                    applicableClass.Name + " method " + descriptor.WriteMember.Name + ": " + e.InnerException.Message, e);
            }
            continue;
        }

        // find the field annotated with {@link @GraphOpProperty}
        foreach (var annotatedField in annotatedFields)
        {
            var anno = (DataFlowOpParameterAttribute) TypeHelper.GetAnnotations<DataFlowOpParameterAttribute>(
                annotatedField.UnwrapAttributes())[0];
            if (anno.Name.Equals(propertyName) || annotatedField.Name.Equals(propertyName))
            {
                var coerceProperty = CoerceProperty(
                    propertyName, applicableClass, property.Value, annotatedField.FieldType,
                    exprNodeOrigin, exprValidationContext, true, true);
                try
                {
                    annotatedField.SetValue(top, coerceProperty);
                }
                catch (Exception e)
                {
                    throw new ExprValidationException(
                        "Failed to set field '" + annotatedField.Name + "': " + e.Message, e);
                }
                found = true;
                break;
            }
        }
        if (found)
        {
            continue;
        }
        throw new ExprValidationException(
            "Failed to find writable property '" + propertyName + "' for class " + applicableClass.Name);
    }

    // second pass: if a parameter URI - value pairs were provided, check that
    if (optionalParameterURIs != null)
    {
        // direct field assignment by 'operator_name/field_name' URI, no coercion
        foreach (var annotatedField in annotatedFields)
        {
            try
            {
                var uri = operatorName + "/" + annotatedField.Name;
                if (optionalParameterURIs.ContainsKey(uri))
                {
                    var value = optionalParameterURIs.Get(uri);
                    annotatedField.SetValue(top, value);
                    if (Log.IsDebugEnabled)
                    {
                        Log.Debug(
                            "Found parameter '" + uri + "' for data flow " + dataFlowName + " setting " + value);
                    }
                }
                else
                {
                    if (Log.IsDebugEnabled)
                    {
                        Log.Debug("Not found parameter '" + uri + "' for data flow " + dataFlowName);
                    }
                }
            }
            catch (Exception e)
            {
                throw new ExprValidationException(
                    "Failed to set field '" + annotatedField.Name + "': " + e.Message, e);
            }
        }
        // catch-all (string, object) methods receive every URI addressed to this operator
        foreach (var method in annotatedMethods)
        {
            var anno = (DataFlowOpParameterAttribute) TypeHelper.GetAnnotations(
                typeof(DataFlowOpParameterAttribute), method.UnwrapAttributes())[0];
            if (anno.IsAll)
            {
                var parameters = method.GetParameters();
                var parameterTypes = method.GetParameterTypes();
                if (parameterTypes.Length == 2 &&
                    parameterTypes[0] == typeof(string) &&
                    parameterTypes[1] == typeof(object))
                {
                    foreach (var entry in optionalParameterURIs)
                    {
                        var elements = URIUtil.ParsePathElements(new Uri(entry.Key));
                        if (elements.Length < 2)
                        {
                            throw new ExprValidationException(
                                "Failed to parse URI '" + entry.Key + "', expected " +
                                "'operator_name/property_name' format");
                        }
                        if (elements[0].Equals(operatorName))
                        {
                            try
                            {
                                method.Invoke(top, new[] {elements[1], entry.Value});
                            }
                            catch (MemberAccessException e)
                            {
                                throw new ExprValidationException(
                                    "Illegal access invoking method for property '" + entry.Key + "' for class " +
                                    applicableClass.Name + " method " + method.Name, e);
                            }
                            catch (TargetException e)
                            {
                                throw new ExprValidationException(
                                    "Exception invoking method for property '" + entry.Key + "' for class " +
                                    applicableClass.Name + " method " + method.Name + ": " + e.InnerException.Message, e);
                            }
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Initializes the configuration with an empty, insertion-ordered super-type set.
/// </summary>
public ConfigurationEventTypeWithSupertype()
{
    SuperTypes = new LinkedHashSet<String>();
}
/// <summary>
/// Walks the type hierarchy of <paramref name="type"/> breadth-first, collecting
/// all base types (excluding object itself, which is always included up front) and
/// all interfaces in discovery order, resolving generic-instance interface types
/// where reflection metadata is available.
/// </summary>
private static InheritanceInfo CreateInheritanceInfo(Type type)
{
    LinkedHashSet<Type> intf = new LinkedHashSet<Type>();
    Java.Util.HashSet<Type> baseTypes = new Java.Util.HashSet<Type>();
    baseTypes.Add(typeof(object));

    // Note that JavaGetInterfaces will only return interfaces declared by the current type,
    // while .NET returns a flattened map of all interfaces.
    // http://stackoverflow.com/questions/6616055/get-all-derived-interfaces-of-a-class
    // http://stackoverflow.com/questions/9793242/type-getinterfaces-for-declared-interfaces-only
    Java.Util.IQueue<Type> toVisit = new Java.Util.LinkedList<Type>();
    toVisit.Add(type);
    while (toVisit.Peek() != null)
    {
        var currentType = toVisit.Poll();
        var gti = GenericInstanceFactory.GetGenericTypeInfo(currentType);
        // For generic instances, interface-ness is decided by the generic definition.
        bool isInterface = gti != null ? gti.TypeDefinition.JavaIsInterface() : currentType.JavaIsInterface();
        if (!isInterface)
        {
            var baseType = currentType.BaseType;
            if (baseType != null && baseType != typeof(object))
            {
                toVisit.Add(baseType);
                baseTypes.Add(baseType);
            }
        }
        if (gti == null)
        {
            // Plain type: record its declared interfaces and keep walking.
            AddInterfaces(currentType.JavaGetInterfaces(), intf, toVisit);
            continue;
        }
        var typeDef = gti.TypeDefinition;
        var interfaces = typeDef.JavaGetInterfaces();
        var genericInstanceClass = typeDef.GetAnnotation<ITypeReflectionInfo>(typeof(ITypeReflectionInfo));
        if (genericInstanceClass == null)
        {
            AddInterfaces(interfaces, intf, toVisit);
            continue;
        }
        // NOTE(review): Select here appears to be a custom array-returning extension
        // (result has .Length) rather than LINQ's deferred Select — confirm.
        var def = genericInstanceClass.GenericDefinitions().Select(DefinitionParser.Parse);
        if (def.Length == 0)
        {
            AddInterfaces(interfaces, intf, toVisit);
            continue;
        }
        // Rewrite each declared interface to the matching constructed generic instance.
        for (int i = 0; i < interfaces.Length; ++i)
        {
            interfaces[i] = ToMatchedGenericInstanceType(interfaces[i], currentType, def);
        }
        AddInterfaces(interfaces, intf, toVisit);
    }
    return (new InheritanceInfo
    {
        Interfaces = new JavaCollectionWrapper<Type>(intf).ToArray(),
        InterfacesSet = new Java.Util.HashSet<Type>(intf),
        BaseTypes = baseTypes
    });
}
/// <summary>
/// Splits the molecule at the given cut bond into (up to) two fragments, one grown
/// by BFS from each end of the bond. Each non-empty fragment gets a pseudo atom as
/// an attachment point, bonded with the cut bond's order and tagged with the
/// original bond via PropertyName_CutBond.
/// </summary>
/// <param name="cut">the bond being severed</param>
/// <param name="mol">the parent molecule</param>
/// <param name="idx">atom to adjacency-list index map</param>
/// <param name="adjlist">adjacency list, indexed via <paramref name="idx"/></param>
/// <returns>the non-empty fragments (0, 1 or 2 containers)</returns>
private static List<IAtomContainer> MakeCut(IBond cut, IAtomContainer mol, Dictionary<IAtom, int> idx, int[][] adjlist)
{
    var beg = cut.Begin;
    var end = cut.End;
    var bvisit = new LinkedHashSet<IAtom>();
    var evisit = new LinkedHashSet<IAtom>();
    var queue = new ArrayDeque<IAtom>();
    bvisit.Add(beg);
    evisit.Add(end);
    queue.Add(beg);
    // stop visits: pre-marking 'end' as visited prevents the BFS from crossing the cut bond
    bvisit.Add(end);
    while (queue.Any())
    {
        var atom = queue.Poll();
        bvisit.Add(atom);
        foreach (var w in adjlist[idx[atom]])
        {
            var nbr = mol.Atoms[w];
            if (!bvisit.Contains(nbr))
            {
                queue.Add(nbr);
            }
        }
    }
    // drop the sentinel again; 'end' belongs to the other fragment
    bvisit.Remove(end);
    queue.Add(end);
    // stop visits (same sentinel trick, mirrored for the 'end' side)
    evisit.Add(beg);
    while (queue.Any())
    {
        var atom = queue.Poll();
        evisit.Add(atom);
        foreach (var w in adjlist[idx[atom]])
        {
            var nbr = mol.Atoms[w];
            if (!evisit.Contains(nbr))
            {
                queue.Add(nbr);
            }
        }
    }
    evisit.Remove(beg);

    var bldr = mol.Builder;
    var bfrag = bldr.NewAtomContainer();
    var efrag = bldr.NewAtomContainer();

    // NOTE(review): when the two sides differ in size by more than 10 atoms, the larger
    // side is discarded (its visit set cleared) — presumably to keep only the smaller
    // substituent fragment; confirm intent.
    int diff = bvisit.Count - evisit.Count;
    if (diff < -10)
    {
        evisit.Clear();
    }
    else if (diff > 10)
    {
        bvisit.Clear();
    }

    if (bvisit.Any())
    {
        // pseudo atom at index 0 acts as the attachment point replacing the cut bond
        bfrag.Atoms.Add(bldr.NewPseudoAtom());
        foreach (var atom in bvisit)
        {
            bfrag.Atoms.Add(atom);
        }
        bfrag.AddBond(bfrag.Atoms[0], bfrag.Atoms[1], cut.Order);
        bfrag.Bonds[0].SetProperty(PropertyName_CutBond, cut);
    }
    if (evisit.Any())
    {
        efrag.Atoms.Add(bldr.NewPseudoAtom());
        foreach (var atom in evisit)
        {
            efrag.Atoms.Add(atom);
        }
        efrag.AddBond(efrag.Atoms[0], efrag.Atoms[1], cut.Order);
        efrag.Bonds[0].SetProperty(PropertyName_CutBond, cut);
    }

    // copy over every original bond whose endpoints both landed in the same fragment
    foreach (var bond in mol.Bonds)
    {
        var a1 = bond.Begin;
        var a2 = bond.End;
        if (bvisit.Contains(a1) && bvisit.Contains(a2))
        {
            bfrag.Bonds.Add(bond);
        }
        else if (evisit.Contains(a1) && evisit.Contains(a2))
        {
            efrag.Bonds.Add(bond);
        }
    }

    var res = new List<IAtomContainer>();
    if (!bfrag.IsEmpty())
    {
        res.Add(bfrag);
    }
    if (!efrag.IsEmpty())
    {
        res.Add(efrag);
    }
    return (res);
}
/// <summary>
/// Parses EPL module text into a Module: validates that at most one 'module'
/// declaration appears and only as the first declaration, collects 'uses' and
/// 'import' declarations (which must precede all statements), and gathers the
/// remaining expression/comment segments as module items.
/// </summary>
/// <param name="buffer">the raw module text</param>
/// <param name="resourceName">resource name reported in parse errors</param>
/// <returns>the parsed module</returns>
public static Module ParseInternal(String buffer, String resourceName)
{
    var semicolonSegments = Parse(buffer);
    var nodes = new List<ParseNode>();
    foreach (EPLModuleParseItem segment in semicolonSegments)
    {
        nodes.Add(GetModule(segment, resourceName));
    }

    // Pass 1: locate the (single, first) 'module' declaration.
    String moduleName = null;
    int count = 0;
    foreach (ParseNode node in nodes)
    {
        if (node is ParseNodeComment)
        {
            continue;
        }
        if (node is ParseNodeModule)
        {
            if (moduleName != null)
            {
                throw new ParseException(
                    "Duplicate use of the 'module' keyword for resource '" + resourceName + "'");
            }
            if (count > 0)
            {
                throw new ParseException(
                    "The 'module' keyword must be the first declaration in the module file for resource '" + resourceName + "'");
            }
            moduleName = ((ParseNodeModule)node).ModuleName;
        }
        count++;
    }

    // Pass 2: collect 'uses' and 'import', which must precede any statement.
    ICollection<String> uses = new LinkedHashSet<String>();
    ICollection<String> imports = new LinkedHashSet<String>();
    count = 0;
    foreach (ParseNode node in nodes)
    {
        if ((node is ParseNodeComment) || (node is ParseNodeModule))
        {
            continue;
        }
        const string message = "The 'uses' and 'import' keywords must be the first declaration in the module file or follow the 'module' declaration";
        if (node is ParseNodeUses)
        {
            if (count > 0)
            {
                throw new ParseException(message);
            }
            uses.Add(((ParseNodeUses)node).Uses);
            continue;
        }
        if (node is ParseNodeImport)
        {
            if (count > 0)
            {
                throw new ParseException(message);
            }
            imports.Add(((ParseNodeImport)node).Imported);
            continue;
        }
        count++;
    }

    // Pass 3: remaining comment and expression segments become the module items.
    var items = new List<ModuleItem>();
    foreach (ParseNode node in nodes)
    {
        if ((node is ParseNodeComment) || (node is ParseNodeExpression))
        {
            bool isComments = (node is ParseNodeComment);
            items.Add(new ModuleItem(node.Item.Expression, isComments, node.Item.LineNum, node.Item.StartChar, node.Item.EndChar));
        }
    }
    return (new Module(moduleName, resourceName, uses, imports, items, buffer));
}
/// <summary>ToArray on a freshly constructed set must yield an empty array.</summary>
public void EmptySetToArray()
{
    var subject = new LinkedHashSet<int>();

    Assert.That(subject.ToArray(), Is.Empty);
}
/// <summary>
/// Find elements matching selector.
/// </summary>
/// <param name="query">CSS selector</param>
/// <param name="roots">root elements to descend into</param>
/// <returns>matching elements, empty if not</returns>
public static Elements Select(string query, IEnumerable<Element> roots)
{
    Validate.NotEmpty(query);
    Validate.NotNull(roots);

    // A LinkedHashSet de-duplicates elements matched under more than one root
    // while keeping first-seen order.
    var matches = new LinkedHashSet<Element>();
    foreach (Element root in roots)
    {
        matches.AddRange(Select(query, root));
    }
    return new Elements(matches);
}
/// <summary>Empties the queue without processing any of the objects in it.</summary>
/// <remarks>Empties the queue without processing any of the objects in it.</remarks>
public virtual void Clear()
{
    Log.D(Database.Tag, this + ": clear() called, setting inbox to null");
    // Presumably cancels any pending scheduled processing before the queue is
    // discarded, so a scheduled run doesn't fire against the cleared state —
    // see Unschedule() for the exact semantics.
    Unschedule();
    // Dropping the reference discards all queued objects unprocessed.
    // NOTE(review): no locking here — assumes callers serialize access externally; confirm.
    inbox = null;
}
/// <summary>
/// For all alts, find which ref X or r needs a List. Must see across alts: if
/// any alt needs X or r as a list, then it is defined as a list.
/// </summary>
/// <param name="altASTs">the alternatives of the rule being analyzed</param>
/// <returns>the set of declarations to generate, in first-seen ref order</returns>
public virtual ISet<Decl.Decl> GetDeclsForAllElements(IList<AltAST> altASTs)
{
    // refs that occur more than once within a single alt -> declare as a list
    ISet<string> needsList = new HashSet<string>();
    // refs whose minimum occurrence count in some alt is 0 -> value may be absent
    ISet<string> optional = new HashSet<string>();
    // refs whose per-alt frequency is reported as 0 -> no decl generated at all.
    // NOTE(review): the precise meaning of the two frequency sets (min vs. alt)
    // comes from GetElementFrequenciesForAlt, which is not visible here — confirm.
    ISet<string> suppress = new HashSet<string>();
    // every RULE_REF/TOKEN_REF node seen, across all alts, in document order
    IList<GrammarAST> allRefs = new List<GrammarAST>();
    foreach (AltAST ast in altASTs)
    {
        // collect all rule and token references appearing in this alternative
        IntervalSet reftypes = new IntervalSet(ANTLRParser.RULE_REF, ANTLRParser.TOKEN_REF);
        IList<GrammarAST> refs = ast.GetNodesWithType(reftypes);
        foreach (var @ref in refs)
            allRefs.Add(@ref);
        System.Tuple<FrequencySet<string>, FrequencySet<string>> minAndAltFreq = GetElementFrequenciesForAlt(ast);
        FrequencySet<string> minFreq = minAndAltFreq.Item1;
        FrequencySet<string> altFreq = minAndAltFreq.Item2;
        foreach (GrammarAST t in refs)
        {
            string refLabelName = GetLabelName(rule.g, t);
            if (altFreq.GetCount(refLabelName) == 0)
            {
                suppress.Add(refLabelName);
            }
            if (minFreq.GetCount(refLabelName) == 0)
            {
                optional.Add(refLabelName);
            }
            if (altFreq.GetCount(refLabelName) > 1)
            {
                needsList.Add(refLabelName);
            }
        }
    }
    // Build one decl per ref; LinkedHashSet both de-duplicates and preserves
    // the first-seen order of the refs.
    ISet<Decl.Decl> decls = new LinkedHashSet<Decl.Decl>();
    foreach (GrammarAST t in allRefs)
    {
        string refLabelName = GetLabelName(rule.g, t);
        if (suppress.Contains(refLabelName))
        {
            continue;
        }
        IList<Decl.Decl> d = GetDeclForAltElement(t, refLabelName, needsList.Contains(refLabelName), optional.Contains(refLabelName));
        decls.UnionWith(d);
    }
    return decls;
}
/// <summary>Augment the coreferent mention map with acronym matches.</summary>
/// <param name="mentions">All KBP mentions in the document.</param>
/// <param name="mentionsMap">
/// Cluster map (canonical mention -> coreferent mentions) that is augmented in place.
/// </param>
private static void AcronymMatch(IList<ICoreMap> mentions, IDictionary<ICoreMap, ICollection<ICoreMap>> mentionsMap)
{
    int ticks = 0;
    // Get all the candidate antecedents: multi-token ORG/LOC mentions keyed by their token strings.
    IDictionary<IList<string>, ICoreMap> textToMention = new Dictionary<IList<string>, ICoreMap>();
    foreach (ICoreMap mention in mentions)
    {
        string nerTag = mention.Get(typeof(CoreAnnotations.NamedEntityTagAnnotation));
        if (nerTag != null && (nerTag.Equals(KBPRelationExtractor.NERTag.Organization.name) || nerTag.Equals(KBPRelationExtractor.NERTag.Location.name)))
        {
            IList<string> tokens = mention.Get(typeof(CoreAnnotations.TokensAnnotation)).Stream().Map(null).Collect(Collectors.ToList());
            if (tokens.Count > 1)
            {
                textToMention[tokens] = mention;
            }
        }
    }
    // Look for candidate acronyms: single-token ORG/LOC mentions.
    foreach (ICoreMap acronym in mentions)
    {
        string nerTag = acronym.Get(typeof(CoreAnnotations.NamedEntityTagAnnotation));
        if (nerTag != null && (nerTag.Equals(KBPRelationExtractor.NERTag.Organization.name) || nerTag.Equals(KBPRelationExtractor.NERTag.Location.name)))
        {
            string text = acronym.Get(typeof(CoreAnnotations.TextAnnotation));
            if (!text.Contains(" "))
            {
                // Candidate acronym.
                // BUG FIX: the IDictionary indexer throws KeyNotFoundException for an
                // absent key (unlike Java's Map.get, which returns null), so the
                // original 'if (acronymCluster == null)' fallback was unreachable and
                // this method crashed for any acronym not already clustered.
                // TryGetValue restores the intended "start a fresh singleton cluster"
                // behavior of the Java original.
                ICollection<ICoreMap> acronymCluster;
                if (!mentionsMap.TryGetValue(acronym, out acronymCluster))
                {
                    acronymCluster = new LinkedHashSet<ICoreMap>();
                    acronymCluster.Add(acronym);
                }
                // Try to match it to an antecedent
                foreach (KeyValuePair<IList<string>, ICoreMap> entry in textToMention)
                {
                    // Time out if we take too long in this loop.
                    ticks += 1;
                    if (ticks > 1000)
                    {
                        return;
                    }
                    // Check if the pair is an acronym
                    if (AcronymMatcher.IsAcronym(text, entry.Key))
                    {
                        // Case: found a coreferent pair
                        ICoreMap coreferent = entry.Value;
                        // Same Java->C# indexer fix as above.
                        ICollection<ICoreMap> coreferentCluster;
                        if (!mentionsMap.TryGetValue(coreferent, out coreferentCluster))
                        {
                            coreferentCluster = new LinkedHashSet<ICoreMap>();
                            coreferentCluster.Add(coreferent);
                        }
                        // Create a new coreference cluster merging both sides.
                        ICollection<ICoreMap> newCluster = new LinkedHashSet<ICoreMap>();
                        Sharpen.Collections.AddAll(newCluster, acronymCluster);
                        Sharpen.Collections.AddAll(newCluster, coreferentCluster);
                        // Set the new cluster as the cluster of every member.
                        foreach (ICoreMap key in newCluster)
                        {
                            mentionsMap[key] = newCluster;
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Populates every mapped property of this entity with sample data for
/// persistence round-trip tests.
/// </summary>
public void SetDefaults()
{
    // BUG FIX: the original read DateTime.Now three times (once each for Year,
    // Month and Day); the reads could straddle a day/month/year rollover and
    // produce a wrong or even invalid date. Snapshot the clock once.
    var now = DateTime.Now;
    DateTime today = new DateTime(now.Year, now.Month, now.Day);

    StringSet = new HashSet<string> {"foo", "bar", "baz"};

    StringDateMap = new SortedList();
    StringDateMap.Add("now", DateTime.Now);
    StringDateMap.Add("never", null); // value is persisted since NH-2199
    // according to SQL Server the big bag happened in 1753 ;)
    StringDateMap.Add("big bang", new DateTime(1753, 01, 01));
    //StringDateMap.Add( "millenium", new DateTime( 2000, 01, 01 ) );

    StringArray = StringSet.ToArray();
    StringList = new ArrayList(StringArray);
    IntArray = new int[] {1, 3, 3, 7};
    FooArray = new Foo[0];

    Customs = new ArrayList();
    Customs.Add(new String[] {"foo", "bar"});
    Customs.Add(new String[] {"A", "B"});
    Customs.Add(new String[] {"1", "2"});

    FooSet = new HashSet<FooProxy>();
    Components = new FooComponent[]
    {
        new FooComponent("foo", 42, null, null),
        new FooComponent("bar", 88, null, new FooComponent("sub", 69, null, null))
    };
    TimeArray = new DateTime[]
    {
        new DateTime(),
        new DateTime(),
        new DateTime(),
        // H2.1 has null here, but it's illegal on .NET
        new DateTime(0)
    };
    Count = 667;
    Name = "Bazza";

    TopComponents = new ArrayList();
    TopComponents.Add(new FooComponent("foo", 11, new DateTime[] {today, new DateTime(2123, 1, 1)}, null));
    TopComponents.Add(
        new FooComponent("bar", 22, new DateTime[] {new DateTime(2007, 2, 3), new DateTime(1945, 6, 1)}, null));
    TopComponents.Add(null);

    // A bag mapping allows duplicates, unlike the sets above.
    Bag = new ArrayList();
    Bag.Add("duplicate");
    Bag.Add("duplicate");
    Bag.Add("duplicate");
    Bag.Add("unique");

    Cached = new LinkedHashSet<CompositeElement>();
    CompositeElement ce = new CompositeElement();
    ce.Foo = "foo";
    ce.Bar = "bar";
    CompositeElement ce2 = new CompositeElement();
    ce2.Foo = "fooxxx";
    ce2.Bar = "barxxx";
    Cached.Add(ce);
    Cached.Add(ce2);

    CachedMap = new SortedList();
    CachedMap.Add(this, ce);
}
/// <summary>Annotate this document for KBP relations.</summary>
/// <remarks>
/// Pipeline: gather entity mentions per sentence; cluster coreferent mentions
/// (coref chains plus acronym matching); pick a canonical mention per cluster;
/// then classify every ordered mention pair in each sentence and attach the
/// highest-confidence relation triples to the sentence's KBPTriplesAnnotation.
/// </remarks>
/// <param name="annotation">The document to annotate.</param>
public virtual void Annotate(Annotation annotation)
{
// NOTE(review): Sharpen-translated code. Several Java idioms survive verbatim:
// 'Contains(key)' on dictionaries and '.Stream().Map(null)/.Filter(null)' placeholder
// lambdas presumably resolve via Sharpen shims — confirm before relying on them.
// get a list of sentences for this annotation
IList <ICoreMap> sentences = annotation.Get(typeof(CoreAnnotations.SentencesAnnotation));
// Create simple document
Document doc = new Document(kbpProperties, serializer.ToProto(annotation));
// Get the mentions in the document
IList <ICoreMap> mentions = new List <ICoreMap>();
foreach (ICoreMap sentence in sentences) { Sharpen.Collections.AddAll(mentions, sentence.Get(typeof(CoreAnnotations.MentionsAnnotation))); }
// Compute coreferent clusters
// (map an index to a KBP mention)
// Each (sentence index, token index) pair points at the KBP mention covering that token.
IDictionary <Pair <int, int>, ICoreMap> mentionByStartIndex = new Dictionary <Pair <int, int>, ICoreMap>();
foreach (ICoreMap mention in mentions) { foreach (CoreLabel token in mention.Get(typeof(CoreAnnotations.TokensAnnotation))) { mentionByStartIndex[Pair.MakePair(token.SentIndex(), token.Index())] = mention; } }
// (collect coreferent KBP mentions)
IDictionary <ICoreMap, ICollection <ICoreMap> > mentionsMap = new Dictionary <ICoreMap, ICollection <ICoreMap> >();
// map from canonical mention -> other mentions
// Walk each coref chain: the first chain mention that overlaps a KBP mention becomes
// the cluster key, and every later overlapping mention joins its cluster.
if (annotation.Get(typeof(CorefCoreAnnotations.CorefChainAnnotation)) != null) { foreach (KeyValuePair <int, CorefChain> chain in annotation.Get(typeof(CorefCoreAnnotations.CorefChainAnnotation))) { ICoreMap firstMention = null; foreach (CorefChain.CorefMention mention_1 in chain.Value.GetMentionsInTextualOrder()) { ICoreMap kbpMention = null; for (int i = mention_1.startIndex; i < mention_1.endIndex; ++i) { if (mentionByStartIndex.Contains(Pair.MakePair(mention_1.sentNum - 1, i))) { kbpMention = mentionByStartIndex[Pair.MakePair(mention_1.sentNum - 1, i)]; break; } } if (firstMention == null) { firstMention = kbpMention; } if (kbpMention != null) { if (!mentionsMap.Contains(firstMention)) { mentionsMap[firstMention] = new LinkedHashSet <ICoreMap>(); }
mentionsMap[firstMention].Add(kbpMention); } } } }
// (coreference acronyms)
AcronymMatch(mentions, mentionsMap);
// (ensure valid NER tag for canonical mention)
// If a cluster's key has no NER tag, re-key the cluster on a member that has one,
// or drop the cluster entirely.
foreach (ICoreMap key in new HashSet <ICoreMap>(mentionsMap.Keys)) { if (key.Get(typeof(CoreAnnotations.NamedEntityTagAnnotation)) == null) { ICoreMap newKey = null; foreach (ICoreMap candidate in mentionsMap[key]) { if (candidate.Get(typeof(CoreAnnotations.NamedEntityTagAnnotation)) != null) { newKey = candidate; break; } } if (newKey != null) { mentionsMap[newKey] = Sharpen.Collections.Remove(mentionsMap, key); } else { Sharpen.Collections.Remove(mentionsMap, key); } } }
// case: no mention in this chain has an NER tag.
// Propagate Entity Link
// Copy the canonical mention's Wikipedia link onto every token of every cluster member.
foreach (KeyValuePair <ICoreMap, ICollection <ICoreMap> > entry in mentionsMap) { string entityLink = entry.Key.Get(typeof(CoreAnnotations.WikipediaEntityAnnotation)); if (entityLink != null) { foreach (ICoreMap mention_1 in entry.Value) { foreach (CoreLabel token in mention_1.Get(typeof(CoreAnnotations.TokensAnnotation))) { token.Set(typeof(CoreAnnotations.WikipediaEntityAnnotation), entityLink); } } } }
// create a mapping of char offset pairs to KBPMention
Dictionary <Pair <int, int>, ICoreMap> charOffsetToKBPMention = new Dictionary <Pair <int, int>, ICoreMap>(); foreach (ICoreMap mention_2 in mentions) { int nerMentionCharBegin = mention_2.Get(typeof(CoreAnnotations.CharacterOffsetBeginAnnotation)); int nerMentionCharEnd = mention_2.Get(typeof(CoreAnnotations.CharacterOffsetEndAnnotation)); charOffsetToKBPMention[new Pair <int, int>(nerMentionCharBegin, nerMentionCharEnd)] = mention_2; }
// Create a canonical mention map
// Spanish uses a dedicated coref system; other languages start empty and are
// filled from the coref chains below.
IDictionary <ICoreMap, ICoreMap> mentionToCanonicalMention; if (kbpLanguage.Equals(LanguageInfo.HumanLanguage.Spanish)) { mentionToCanonicalMention = spanishCorefSystem.CanonicalMentionMapFromEntityMentions(mentions); if (Verbose) { log.Info("---"); log.Info("basic spanish coref results"); foreach (ICoreMap originalMention in mentionToCanonicalMention.Keys) { if (!originalMention.Equals(mentionToCanonicalMention[originalMention])) { log.Info("mapped: " + originalMention + " to: " + mentionToCanonicalMention[originalMention]); } } } } else { mentionToCanonicalMention = new Dictionary <ICoreMap, ICoreMap>(); }
// check if there is coref info
ICollection <KeyValuePair <int, CorefChain> > corefChains; if (annotation.Get(typeof(CorefCoreAnnotations.CorefChainAnnotation)) != null && !kbpLanguage.Equals(LanguageInfo.HumanLanguage.Spanish)) { corefChains = annotation.Get(typeof(CorefCoreAnnotations.CorefChainAnnotation)); } else { corefChains = new HashSet <KeyValuePair <int, CorefChain> >(); }
// For each chain, map every member KBP mention to the chain's best mention,
// unless an ad hoc filter below rejects the link.
foreach (KeyValuePair <int, CorefChain> indexCorefChainPair in corefChains) { CorefChain corefChain = indexCorefChainPair.Value; Pair <IList <ICoreMap>, ICoreMap> corefChainKBPMentionsAndBestIndex = CorefChainToKBPMentions(corefChain, annotation, charOffsetToKBPMention); IList <ICoreMap> corefChainKBPMentions = corefChainKBPMentionsAndBestIndex.First(); ICoreMap bestKBPMentionForChain = corefChainKBPMentionsAndBestIndex.Second(); if (bestKBPMentionForChain != null) { foreach (ICoreMap kbpMention in corefChainKBPMentions) { if (kbpMention != null) {
//System.err.println("---");
// ad hoc filters ; assume acceptable unless a filter blocks it
bool acceptableLink = true;
// block people matches without a token overlap, exempting pronominal to non-pronominal
// good: Ashton --> Catherine Ashton
// good: she --> Catherine Ashton
// bad: Morsi --> Catherine Ashton
string kbpMentionNERTag = kbpMention.Get(typeof(CoreAnnotations.NamedEntityTagAnnotation)); string bestKBPMentionForChainNERTag = bestKBPMentionForChain.Get(typeof(CoreAnnotations.NamedEntityTagAnnotation)); if (kbpMentionNERTag != null && bestKBPMentionForChainNERTag != null && kbpMentionNERTag.Equals("PERSON") && bestKBPMentionForChainNERTag.Equals("PERSON") && !KbpIsPronominalMention(kbpMention.Get(typeof(CoreAnnotations.TokensAnnotation))[0] ) && !KbpIsPronominalMention(bestKBPMentionForChain.Get(typeof(CoreAnnotations.TokensAnnotation))[0])) {
//System.err.println("testing PERSON to PERSON coref link");
bool tokenMatchFound = false; foreach (CoreLabel kbpToken in kbpMention.Get(typeof(CoreAnnotations.TokensAnnotation))) { foreach (CoreLabel bestKBPToken in bestKBPMentionForChain.Get(typeof(CoreAnnotations.TokensAnnotation))) { if (kbpToken.Word().ToLower().Equals(bestKBPToken.Word().ToLower())) { tokenMatchFound = true; break; } } if (tokenMatchFound) { break; } } if (!tokenMatchFound) { acceptableLink = false; } }
// check the coref link passed the filters
if (acceptableLink) { mentionToCanonicalMention[kbpMention] = bestKBPMentionForChain; } } } } }
//System.err.println("kbp mention: " + kbpMention.get(CoreAnnotations.TextAnnotation.class));
//System.err.println("coref mention: " + bestKBPMentionForChain.get(CoreAnnotations.TextAnnotation.class));
// (add missing mentions)
// NOTE(review): placeholder 'null' lambdas — the original Java predicate/consumer
// bodies were lost in translation; this line's intended effect cannot be read here.
mentions.Stream().Filter(null).ForEach(null);
// handle acronym coreference
Dictionary <string, IList <ICoreMap> > acronymClusters = new Dictionary <string, IList <ICoreMap> >(); Dictionary <string, IList <ICoreMap> > acronymInstances = new Dictionary <string, IList <ICoreMap> >(); foreach (ICoreMap acronymMention in mentionToCanonicalMention.Keys) { string acronymNERTag = acronymMention.Get(typeof(CoreAnnotations.NamedEntityTagAnnotation)); if ((acronymMention == mentionToCanonicalMention[acronymMention]) && acronymNERTag != null && (acronymNERTag.Equals(KBPRelationExtractor.NERTag.Organization.name) || acronymNERTag.Equals(KBPRelationExtractor.NERTag.Location.name))) { string acronymText = acronymMention.Get(typeof(CoreAnnotations.TextAnnotation)); IList <ICoreMap> coreferentMentions = new List <ICoreMap>();
// define acronyms as not containing spaces (e.g. 
// NOTE(review): the 'ACLU)' token below is a comment fragment spilled into code by
// the Java->C# conversion; preserved verbatim — this does not compile as-is.
ACLU) if (!acronymText.Contains(" ")) { int numCoreferentsChecked = 0; foreach (ICoreMap coreferentMention in mentions) {
// only check first 1000
if (numCoreferentsChecked > 1000) { break; }
// don't check a mention against itself
if (acronymMention == coreferentMention) { continue; }
// don't check other mentions without " "
string coreferentText = coreferentMention.Get(typeof(CoreAnnotations.TextAnnotation)); if (!coreferentText.Contains(" ")) { continue; } numCoreferentsChecked++; IList <string> coreferentTokenStrings = coreferentMention.Get(typeof(CoreAnnotations.TokensAnnotation)).Stream().Map(null).Collect(Collectors.ToList());
// when an acronym match is found:
// store every mention (that isn't ACLU) that matches with ACLU in acronymClusters
// store every instance of "ACLU" in acronymInstances
// afterwards find the best mention in acronymClusters, and match it to every mention in acronymInstances
if (AcronymMatcher.IsAcronym(acronymText, coreferentTokenStrings)) { if (!acronymClusters.Contains(acronymText)) { acronymClusters[acronymText] = new List <ICoreMap>(); } if (!acronymInstances.Contains(acronymText)) { acronymInstances[acronymText] = new List <ICoreMap>(); } acronymClusters[acronymText].Add(coreferentMention); acronymInstances[acronymText].Add(acronymMention); } } } } }
// process each acronym (e.g. 
// NOTE(review): 'ACLU)' below is the same spilled comment fragment — preserved verbatim.
ACLU) foreach (string acronymText_1 in acronymInstances.Keys) {
// find longest ORG or null
ICoreMap bestORG = null; foreach (ICoreMap coreferentMention in acronymClusters[acronymText_1]) { if (!coreferentMention.Get(typeof(CoreAnnotations.NamedEntityTagAnnotation)).Equals(KBPRelationExtractor.NERTag.Organization.name)) { continue; } if (bestORG == null) { bestORG = coreferentMention; } else { if (coreferentMention.Get(typeof(CoreAnnotations.TextAnnotation)).Length > bestORG.Get(typeof(CoreAnnotations.TextAnnotation)).Length) { bestORG = coreferentMention; } } }
// find longest LOC or null
ICoreMap bestLOC = null; foreach (ICoreMap coreferentMention_1 in acronymClusters[acronymText_1]) { if (!coreferentMention_1.Get(typeof(CoreAnnotations.NamedEntityTagAnnotation)).Equals(KBPRelationExtractor.NERTag.Location.name)) { continue; } if (bestLOC == null) { bestLOC = coreferentMention_1; } else { if (coreferentMention_1.Get(typeof(CoreAnnotations.TextAnnotation)).Length > bestLOC.Get(typeof(CoreAnnotations.TextAnnotation)).Length) { bestLOC = coreferentMention_1; } } }
// link ACLU to "American Civil Liberties Union" ; make sure NER types match
foreach (ICoreMap acronymMention_1 in acronymInstances[acronymText_1]) { string mentionType = acronymMention_1.Get(typeof(CoreAnnotations.NamedEntityTagAnnotation)); if (mentionType.Equals(KBPRelationExtractor.NERTag.Organization.name) && bestORG != null) { mentionToCanonicalMention[acronymMention_1] = bestORG; } if (mentionType.Equals(KBPRelationExtractor.NERTag.Location.name) && bestLOC != null) { mentionToCanonicalMention[acronymMention_1] = bestLOC; } } }
// Cluster mentions by sentence
IList <ICoreMap>[] mentionsBySentence = new IList[annotation.Get(typeof(CoreAnnotations.SentencesAnnotation)).Count]; for (int i_1 = 0; i_1 < mentionsBySentence.Length; ++i_1) { mentionsBySentence[i_1] = new List <ICoreMap>(); } foreach (ICoreMap mention_3 in mentionToCanonicalMention.Keys) {
mentionsBySentence[mention_3.Get(typeof(CoreAnnotations.SentenceIndexAnnotation))].Add(mention_3); }
// Classify
// For each sentence: skip if too long; otherwise score every ordered
// (subject, object) mention pair and keep the best triple per relation string.
for (int sentenceI = 0; sentenceI < mentionsBySentence.Length; ++sentenceI) { Dictionary <string, RelationTriple> relationStringsToTriples = new Dictionary <string, RelationTriple>(); IList <RelationTriple> finalTriplesList = new List <RelationTriple>();
// the annotations
IList <ICoreMap> candidates = mentionsBySentence[sentenceI];
// determine sentence length
int sentenceLength = annotation.Get(typeof(CoreAnnotations.SentencesAnnotation))[sentenceI].Get(typeof(CoreAnnotations.TokensAnnotation)).Count;
// check if sentence is too long, if it's too long don't run kbp
if (maxLength != -1 && sentenceLength > maxLength) {
// set the triples annotation to an empty list of RelationTriples
annotation.Get(typeof(CoreAnnotations.SentencesAnnotation))[sentenceI].Set(typeof(CoreAnnotations.KBPTriplesAnnotation), finalTriplesList);
// continue to next sentence
continue; }
// sentence isn't too long, so continue processing this sentence
for (int subjI = 0; subjI < candidates.Count; ++subjI) { ICoreMap subj = candidates[subjI]; int subjBegin = subj.Get(typeof(CoreAnnotations.TokensAnnotation))[0].Index() - 1; int subjEnd = subj.Get(typeof(CoreAnnotations.TokensAnnotation))[subj.Get(typeof(CoreAnnotations.TokensAnnotation)).Count - 1].Index(); Optional <KBPRelationExtractor.NERTag> subjNER = KBPRelationExtractor.NERTag.FromString(subj.Get(typeof(CoreAnnotations.NamedEntityTagAnnotation))); if (subjNER.IsPresent()) { for (int objI = 0; objI < candidates.Count; ++objI) { if (subjI == objI) { continue; } if (Thread.Interrupted()) { throw new RuntimeInterruptedException(); } ICoreMap obj = candidates[objI]; int objBegin = obj.Get(typeof(CoreAnnotations.TokensAnnotation))[0].Index() - 1; int objEnd = obj.Get(typeof(CoreAnnotations.TokensAnnotation))[obj.Get(typeof(CoreAnnotations.TokensAnnotation)).Count - 1].Index(); Optional <KBPRelationExtractor.NERTag> objNER = KBPRelationExtractor.NERTag.FromString(obj.Get(typeof(CoreAnnotations.NamedEntityTagAnnotation))); if (objNER.IsPresent() && KBPRelationExtractor.RelationType.PlausiblyHasRelation(subjNER.Get(), objNER.Get())) {
// type check
KBPRelationExtractor.KBPInput input = new KBPRelationExtractor.KBPInput(new Span(subjBegin, subjEnd), new Span(objBegin, objEnd), subjNER.Get(), objNER.Get(), doc.Sentence(sentenceI));
// -- BEGIN Classify
Pair <string, double> prediction = extractor.Classify(input);
// -- END Classify
// Handle the classifier output
if (!KBPStatisticalExtractor.NoRelation.Equals(prediction.first)) { RelationTriple triple = new RelationTriple.WithLink(subj.Get(typeof(CoreAnnotations.TokensAnnotation)), mentionToCanonicalMention[subj].Get(typeof(CoreAnnotations.TokensAnnotation)), Java.Util.Collections.SingletonList(new CoreLabel(new Word (ConvertRelationNameToLatest(prediction.first)))), obj.Get(typeof(CoreAnnotations.TokensAnnotation)), mentionToCanonicalMention[obj].Get(typeof(CoreAnnotations.TokensAnnotation)), prediction.second, sentences[sentenceI].Get(typeof(SemanticGraphCoreAnnotations.CollapsedCCProcessedDependenciesAnnotation )), subj.Get(typeof(CoreAnnotations.WikipediaEntityAnnotation)), obj.Get(typeof(CoreAnnotations.WikipediaEntityAnnotation))); string tripleString = triple.SubjectGloss() + "\t" + triple.RelationGloss() + "\t" + triple.ObjectGloss();
// ad hoc checks for problems
bool acceptableTriple = true; if (triple.ObjectGloss().Equals(triple.SubjectGloss()) && triple.RelationGloss().EndsWith("alternate_names")) { acceptableTriple = false; }
// only add this triple if it has the highest confidence ; this process generates duplicates with
// different confidence scores, so we want to filter out the lower confidence versions
if (acceptableTriple && !relationStringsToTriples.Contains(tripleString)) { relationStringsToTriples[tripleString] = triple; } else { if (acceptableTriple && triple.confidence > relationStringsToTriples[tripleString].confidence) { relationStringsToTriples[tripleString] = triple; } } } } } } } finalTriplesList = new ArrayList(relationStringsToTriples.Values);
// Set triples
annotation.Get(typeof(CoreAnnotations.SentencesAnnotation))[sentenceI].Set(typeof(CoreAnnotations.KBPTriplesAnnotation), finalTriplesList); } }