/// <summary>
/// Materializes every model in the backing collection, forwards the snapshot to the
/// optional <c>toWrite</c> sink when one is present, and returns it as an async stream.
/// </summary>
/// <returns>An async sequence over a fully materialized snapshot of all models.</returns>
public IAsyncEnumerable<TModel> FindAllAsync()
{
    // Snapshot first so the sink and the returned stream see the same elements.
    var snapshot = Collection.FindAll().ToList();
    if (toWrite != null)
    {
        toWrite.AddAll(snapshot);
    }
    return snapshot.ToAsyncEnumerable();
}
/// <summary>
/// Performs step (d) of the RFC 3280 section 6.1.3 basic certificate processing:
/// evaluates the certificate's certificatePolicies extension against the valid
/// policy tree, intersects the user's acceptable-policy set, expands anyPolicy
/// nodes where permitted, prunes childless nodes, and propagates criticality.
/// </summary>
/// <param name="certPath">The certification path being validated.</param>
/// <param name="index">Index of the certificate under examination within the path.</param>
/// <param name="acceptablePolicies">User-initial acceptable policy set; mutated in place.</param>
/// <param name="validPolicyTree">Root of the current valid policy tree (may be null).</param>
/// <param name="policyNodes">Per-depth lists of policy nodes; mutated in place.</param>
/// <param name="inhibitAnyPolicy">Current inhibit_anyPolicy state variable.</param>
/// <returns>The (possibly pruned) valid policy tree, or null when the extension is absent.</returns>
internal static PkixPolicyNode ProcessCertD(
    PkixCertPath certPath,
    int index,
    ISet acceptablePolicies,
    PkixPolicyNode validPolicyTree,
    IList[] policyNodes,
    int inhibitAnyPolicy)
    //throws CertPathValidatorException
{
    IList certs = certPath.Certificates;
    X509Certificate cert = (X509Certificate)certs[index];
    int n = certs.Count;
    // i as defined in the algorithm description
    int i = n - index;

    //
    // (d) policy Information checking against initial policy and
    // policy mapping
    //
    Asn1Sequence certPolicies = null;
    try
    {
        certPolicies = DerSequence.GetInstance(
            PkixCertPathValidatorUtilities.GetExtensionValue(cert, X509Extensions.CertificatePolicies));
    }
    catch (Exception e)
    {
        throw new PkixCertPathValidatorException(
            "Could not read certificate policies extension from certificate.", e, certPath, index);
    }

    if (certPolicies != null && validPolicyTree != null)
    {
        //
        // (d) (1) — for each asserted policy, attach/extend nodes in the tree
        //
        ISet pols = new HashSet();
        foreach (Asn1Encodable ae in certPolicies)
        {
            PolicyInformation pInfo = PolicyInformation.GetInstance(ae.ToAsn1Object());
            DerObjectIdentifier pOid = pInfo.PolicyIdentifier;
            pols.Add(pOid.Id);

            if (!Rfc3280CertPathUtilities.ANY_POLICY.Equals(pOid.Id))
            {
                ISet pq = null;
                try
                {
                    pq = PkixCertPathValidatorUtilities.GetQualifierSet(pInfo.PolicyQualifiers);
                }
                catch (PkixCertPathValidatorException ex)
                {
                    throw new PkixCertPathValidatorException(
                        "Policy qualifier info set could not be build.", ex, certPath, index);
                }

                // (d)(1)(i): try to match against an expected policy at depth i-1;
                // (d)(1)(ii): otherwise match against an anyPolicy parent.
                bool match = PkixCertPathValidatorUtilities.ProcessCertD1i(i, policyNodes, pOid, pq);

                if (!match)
                {
                    PkixCertPathValidatorUtilities.ProcessCertD1ii(i, policyNodes, pOid, pq);
                }
            }
        }

        // Intersect the acceptable-policy set with the policies asserted here;
        // an empty set or anyPolicy means "accept whatever this cert asserts".
        if (acceptablePolicies.IsEmpty || acceptablePolicies.Contains(Rfc3280CertPathUtilities.ANY_POLICY))
        {
            acceptablePolicies.Clear();
            acceptablePolicies.AddAll(pols);
        }
        else
        {
            ISet t1 = new HashSet();
            foreach (object o in acceptablePolicies)
            {
                if (pols.Contains(o))
                {
                    t1.Add(o);
                }
            }
            acceptablePolicies.Clear();
            acceptablePolicies.AddAll(t1);
        }

        //
        // (d) (2) — anyPolicy expansion, only when anyPolicy is still permitted
        // (inhibitAnyPolicy > 0) or this is a self-issued intermediate cert.
        //
        if ((inhibitAnyPolicy > 0) || ((i < n) && PkixCertPathValidatorUtilities.IsSelfIssued(cert)))
        {
            foreach (Asn1Encodable ae in certPolicies)
            {
                PolicyInformation pInfo = PolicyInformation.GetInstance(ae.ToAsn1Object());
                if (Rfc3280CertPathUtilities.ANY_POLICY.Equals(pInfo.PolicyIdentifier.Id))
                {
                    ISet _apq = PkixCertPathValidatorUtilities.GetQualifierSet(pInfo.PolicyQualifiers);
                    IList _nodes = policyNodes[i - 1];

                    for (int k = 0; k < _nodes.Count; k++)
                    {
                        PkixPolicyNode _node = (PkixPolicyNode)_nodes[k];

                        IEnumerator _policySetIter = _node.ExpectedPolicies.GetEnumerator();
                        while (_policySetIter.MoveNext())
                        {
                            object _tmp = _policySetIter.Current;

                            // Expected policies may be stored as strings or OIDs; normalize.
                            string _policy;
                            if (_tmp is string)
                            {
                                _policy = (string)_tmp;
                            }
                            else if (_tmp is DerObjectIdentifier)
                            {
                                _policy = ((DerObjectIdentifier)_tmp).Id;
                            }
                            else
                            {
                                continue;
                            }

                            // Only spawn a child for expected policies not already present.
                            bool _found = false;
                            foreach (PkixPolicyNode _child in _node.Children)
                            {
                                if (_policy.Equals(_child.ValidPolicy))
                                {
                                    _found = true;
                                }
                            }

                            if (!_found)
                            {
                                ISet _newChildExpectedPolicies = new HashSet();
                                _newChildExpectedPolicies.Add(_policy);

                                PkixPolicyNode _newChild = new PkixPolicyNode(
                                    Platform.CreateArrayList(), i, _newChildExpectedPolicies,
                                    _node, _apq, _policy, false);
                                _node.AddChild(_newChild);
                                policyNodes[i].Add(_newChild);
                            }
                        }
                    }
                    // At most one anyPolicy entry needs expanding.
                    break;
                }
            }
        }

        PkixPolicyNode _validPolicyTree = validPolicyTree;

        //
        // (d) (3) — prune nodes (bottom-up) that gained no children at this depth
        //
        for (int j = (i - 1); j >= 0; j--)
        {
            IList nodes = policyNodes[j];

            for (int k = 0; k < nodes.Count; k++)
            {
                PkixPolicyNode node = (PkixPolicyNode)nodes[k];
                if (!node.HasChildren)
                {
                    _validPolicyTree = PkixCertPathValidatorUtilities.RemovePolicyNode(
                        _validPolicyTree, policyNodes, node);
                    if (_validPolicyTree == null)
                    {
                        break;
                    }
                }
            }
        }

        //
        // d (4) — mark this depth's nodes critical iff the extension was critical
        //
        ISet criticalExtensionOids = cert.GetCriticalExtensionOids();

        if (criticalExtensionOids != null)
        {
            bool critical = criticalExtensionOids.Contains(X509Extensions.CertificatePolicies.Id);

            IList nodes = policyNodes[i];
            for (int j = 0; j < nodes.Count; j++)
            {
                PkixPolicyNode node = (PkixPolicyNode)nodes[j];
                node.IsCritical = critical;
            }
        }

        return _validPolicyTree;
    }

    // No certificatePolicies extension (or tree already null): tree becomes null.
    return null;
}
/// <summary>
/// Copies every term held by this instance into the caller-supplied set.
/// </summary>
/// <param name="terms">Destination set that receives this instance's terms.</param>
public override void ExtractTerms(ISet<Term> terms)
{
    foreach (Term term in this.terms)
    {
        terms.Add(term);
    }
}
/// <summary>
/// Recurse all the way up the tree until all referenced structures are found - in the order in which they are found
/// </summary>
/// <param name="getParentsFor">
/// The list of structures to get the parents for.
/// </param>
/// <param name="ignoreParents">
/// The list of structures to ignore the parents.
/// </param>
/// <param name="filterSet">
/// The filter set.
/// </param>
/// <returns>
/// The referenced structures.
/// </returns>
private IEnumerable<IMaintainableObject> RecurseUpTree(
    IEnumerable<IMaintainableObject> getParentsFor,
    ISet<IMaintainableObject> ignoreParents,
    ISet<IMaintainableObject> filterSet)
{
    var crossReferencingStructures = new List<IMaintainableObject>();
    foreach (IMaintainableObject oldBean in getParentsFor)
    {
        crossReferencingStructures.AddAll(
            this._crossReferencingRetrievalManager.GetCrossReferencingStructures(oldBean.AsReference, false));
    }

    // Filter out the parents we do not want to reversion
    crossReferencingStructures.RemoveItemList(ignoreParents);
    this.FilterReferencingStructures(crossReferencingStructures, filterSet);

    // Mark this level as visited so the recursion terminates.
    ignoreParents.AddAll(crossReferencingStructures);

    if (crossReferencingStructures.Count > 0)
    {
        IEnumerable<IMaintainableObject> ancestors = this.RecurseUpTree(
            crossReferencingStructures, ignoreParents, filterSet);

        foreach (IMaintainableObject currentAncestor in ancestors)
        {
            if (!crossReferencingStructures.Contains(currentAncestor))
            {
                // BUG FIX: previously this called AddAll(ancestors), re-appending the
                // entire ancestor sequence once per missing element — duplicating
                // entries and re-enumerating the sequence. Only the missing ancestor
                // itself should be appended.
                crossReferencingStructures.Add(currentAncestor);
            }
        }
    }

    return crossReferencingStructures;
}
/// <summary>
/// Merges the supplied random variables into this object's unbound scope.
/// </summary>
/// <param name="vars">The random variables to add to the unbound scope.</param>
protected virtual void addUnboundScope(ICollection<IRandomVariable> vars) => unboundScope.AddAll(vars);
/// <summary>
/// Indexing-thread body for a randomized stress test: until the deadline passes or
/// another thread flags failure, it randomly adds/updates single documents or
/// multi-document "packs" (grouped by packID), occasionally buffers IDs/packs for
/// deletion, and periodically applies the buffered deletes. Shared counters
/// (addCount/delCount/packCount) and sets (delIDs/delPackIDs/allSubDocs) record
/// what was done for later verification.
/// </summary>
public override void Run()
{
    // TODO: would be better if this were cross thread, so that we make sure one thread deleting anothers added docs works:
    IList<string> toDeleteIDs = new List<string>();
    IList<SubDocs> toDeleteSubDocs = new List<SubDocs>();

    while (Environment.TickCount < stopTime && !outerInstance.failed.Get())
    {
        try
        {
            // Occasional longish pause if running
            // nightly
            if (LuceneTestCase.TEST_NIGHTLY && Random().Next(6) == 3)
            {
                if (VERBOSE)
                {
                    Console.WriteLine(Thread.CurrentThread.Name + ": now long sleep");
                }
                Thread.Sleep(TestUtil.NextInt(Random(), 50, 500));
            }

            // Rate limit ingest rate:
            if (Random().Next(7) == 5)
            {
                Thread.Sleep(TestUtil.NextInt(Random(), 1, 10));
                if (VERBOSE)
                {
                    Console.WriteLine(Thread.CurrentThread.Name + ": done sleep");
                }
            }

            Document doc = docs.NextDoc();
            if (doc == null)
            {
                // Source of documents exhausted; stop this thread early.
                break;
            }

            // Maybe add randomly named field
            string addedField;
            if (Random().NextBoolean())
            {
                addedField = "extra" + Random().Next(40);
                doc.Add(outerInstance.NewTextField(addedField, "a random field", Field.Store.YES));
            }
            else
            {
                addedField = null;
            }

            if (Random().NextBoolean())
            {
                if (Random().NextBoolean())
                {
                    // Add/update doc block:
                    string packID;
                    SubDocs delSubDocs;
                    if (toDeleteSubDocs.Count > 0 && Random().NextBoolean())
                    {
                        // Reuse a buffered pack's ID so this becomes an update of that pack.
                        delSubDocs = toDeleteSubDocs[Random().Next(toDeleteSubDocs.Count)];
                        Debug.Assert(!delSubDocs.deleted);
                        toDeleteSubDocs.Remove(delSubDocs);
                        // Update doc block, replacing prior packID
                        packID = delSubDocs.packID;
                    }
                    else
                    {
                        delSubDocs = null;
                        // Add doc block, using new packID
                        packID = outerInstance.packCount.GetAndIncrement().ToString(CultureInfo.InvariantCulture);
                    }

                    Field packIDField = outerInstance.NewStringField("packID", packID, Field.Store.YES);
                    IList<string> docIDs = new List<string>();
                    SubDocs subDocs = new SubDocs(packID, docIDs);
                    IList<Document> docsList = new List<Document>();

                    allSubDocs.Add(subDocs);
                    doc.Add(packIDField);
                    docsList.Add(TestUtil.CloneDocument(doc));
                    docIDs.Add(doc.Get("docid"));

                    // Build the rest of the pack (1..10 docs total).
                    int maxDocCount = TestUtil.NextInt(Random(), 1, 10);
                    while (docsList.Count < maxDocCount)
                    {
                        doc = docs.NextDoc();
                        if (doc == null)
                        {
                            break;
                        }
                        docsList.Add(TestUtil.CloneDocument(doc));
                        docIDs.Add(doc.Get("docid"));
                    }
                    outerInstance.addCount.AddAndGet(docsList.Count);

                    Term packIDTerm = new Term("packID", packID);

                    if (delSubDocs != null)
                    {
                        // Updating an existing pack: mark the old sub-docs deleted first.
                        delSubDocs.deleted = true;
                        delIDs.AddAll(delSubDocs.subIDs);
                        outerInstance.delCount.AddAndGet(delSubDocs.subIDs.Count);
                        if (VERBOSE)
                        {
                            Console.WriteLine(Thread.CurrentThread.Name + ": update pack packID=" + delSubDocs.packID + " count=" + docsList.Count + " docs=" + Arrays.ToString(docIDs));
                        }
                        outerInstance.UpdateDocuments(packIDTerm, docsList);
                    }
                    else
                    {
                        if (VERBOSE)
                        {
                            Console.WriteLine(Thread.CurrentThread.Name + ": add pack packID=" + packID + " count=" + docsList.Count + " docs=" + Arrays.ToString(docIDs));
                        }
                        outerInstance.AddDocuments(packIDTerm, docsList);
                    }

                    // Undo the mutation so the shared doc can be reused cleanly.
                    doc.RemoveField("packID");

                    if (Random().Next(5) == 2)
                    {
                        if (VERBOSE)
                        {
                            Console.WriteLine(Thread.CurrentThread.Name + ": buffer del id:" + packID);
                        }
                        toDeleteSubDocs.Add(subDocs);
                    }
                }
                else
                {
                    // Add single doc
                    string docid = doc.Get("docid");
                    if (VERBOSE)
                    {
                        Console.WriteLine(Thread.CurrentThread.Name + ": add doc docid:" + docid);
                    }
                    outerInstance.AddDocument(new Term("docid", docid), doc);
                    outerInstance.addCount.GetAndIncrement();

                    if (Random().Next(5) == 3)
                    {
                        if (VERBOSE)
                        {
                            Console.WriteLine(Thread.CurrentThread.Name + ": buffer del id:" + doc.Get("docid"));
                        }
                        toDeleteIDs.Add(docid);
                    }
                }
            }
            else
            {
                // Update single doc, but we never re-use
                // and ID so the delete will never
                // actually happen:
                if (VERBOSE)
                {
                    Console.WriteLine(Thread.CurrentThread.Name + ": update doc id:" + doc.Get("docid"));
                }
                string docid = doc.Get("docid");
                outerInstance.UpdateDocument(new Term("docid", docid), doc);
                outerInstance.addCount.GetAndIncrement();

                if (Random().Next(5) == 3)
                {
                    if (VERBOSE)
                    {
                        Console.WriteLine(Thread.CurrentThread.Name + ": buffer del id:" + doc.Get("docid"));
                    }
                    toDeleteIDs.Add(docid);
                }
            }

            // Occasionally (1 in 30) flush all buffered single-doc and pack deletes.
            if (Random().Next(30) == 17)
            {
                if (VERBOSE)
                {
                    Console.WriteLine(Thread.CurrentThread.Name + ": apply " + toDeleteIDs.Count + " deletes");
                }

                foreach (string id in toDeleteIDs)
                {
                    if (VERBOSE)
                    {
                        Console.WriteLine(Thread.CurrentThread.Name + ": del term=id:" + id);
                    }
                    outerInstance.DeleteDocuments(new Term("docid", id));
                }

                int count = outerInstance.delCount.AddAndGet(toDeleteIDs.Count);
                if (VERBOSE)
                {
                    Console.WriteLine(Thread.CurrentThread.Name + ": tot " + count + " deletes");
                }
                delIDs.AddAll(toDeleteIDs);
                toDeleteIDs.Clear();

                foreach (SubDocs subDocs in toDeleteSubDocs)
                {
                    Debug.Assert(!subDocs.deleted);
                    delPackIDs.Add(subDocs.packID);
                    outerInstance.DeleteDocuments(new Term("packID", subDocs.packID));
                    subDocs.deleted = true;
                    if (VERBOSE)
                    {
                        Console.WriteLine(Thread.CurrentThread.Name + ": del subs: " + subDocs.subIDs + " packID=" + subDocs.packID);
                    }
                    delIDs.AddAll(subDocs.subIDs);
                    outerInstance.delCount.AddAndGet(subDocs.subIDs.Count);
                }
                toDeleteSubDocs.Clear();
            }

            if (addedField != null)
            {
                doc.RemoveField(addedField);
            }
        }
        catch (Exception t)
        {
            // Record the failure for the other threads and rethrow to kill this one.
            Console.WriteLine(Thread.CurrentThread.Name + ": hit exc");
            Console.WriteLine(t.ToString());
            Console.Write(t.StackTrace);
            outerInstance.failed.Set(true);
            throw new Exception(t.ToString(), t);
        }
    }

    if (VERBOSE)
    {
        Console.WriteLine(Thread.CurrentThread.Name + ": indexing done");
    }

    outerInstance.DoAfterIndexingThreadDone();
}
/// <summary>
/// Adds the elements of the given collection to the inner set, working from a
/// defensive copy produced by <c>getSetCopy</c>.
/// </summary>
/// <param name="c">The collection whose elements should be added.</param>
/// <returns>The result reported by the inner set's AddAll call.</returns>
public bool AddAll(ICollection<T> c)
{
    var elements = getSetCopy(c);
    return innerSet.AddAll(elements);
}
/// <summary>
/// Copies this instance's registrations into the supplied set; does nothing when
/// the supplied set is null.
/// </summary>
/// <param name="registrations0">Destination set that receives this instance's registrations.</param>
public void AddRegistrations(ISet<IRegistrationObject> registrations0)
{
    // NOTE(review): this copies FROM this.registrations INTO the argument, which is
    // the reverse of what the method name suggests — confirm the direction is intended.
    if (registrations0 == null)
    {
        return;
    }

    registrations0.AddAll(this.registrations);
}
//
// START-InferenceProcedure

/// <summary>
/// Asks the knowledge base whether <paramref name="alpha"/> is entailed, using the
/// OTTER-style set-of-support resolution loop: the KB's clauses (factored and
/// standardized apart) form the usable set, the negated query plus an answer
/// literal forms the set of support, and subsumed clauses are eliminated before
/// the main otter() loop runs.
/// </summary>
/// <param name="KB">The first-order-logic knowledge base to query.</param>
/// <param name="alpha">The sentence whose entailment is being tested.</param>
/// <returns>The inference result produced by the otter() main loop.</returns>
public InferenceResult ask(FOLKnowledgeBase KB, Sentence alpha)
{
    ISet<Clause> sos = CollectionFactory.CreateSet<Clause>();
    ISet<Clause> usable = CollectionFactory.CreateSet<Clause>();

    // Usable set will be the set of clauses in the KB,
    // are assuming this is satisfiable as using the
    // Set of Support strategy.
    foreach (Clause cIter in KB.getAllClauses())
    {
        Clause c = cIter;
        c = KB.standardizeApart(c);
        c.setStandardizedApartCheckNotRequired();
        // Factors include the clause itself plus any self-unifying variants.
        usable.AddAll(c.getFactors());
    }

    // Ensure reflexivity axiom is added to usable if using paramodulation.
    if (isUseParamodulation())
    {
        // Reflexivity Axiom: x = x
        TermEquality reflexivityAxiom = new TermEquality(new Variable("x"), new Variable("x"));
        Clause reflexivityClause = new Clause();
        reflexivityClause.addLiteral(new Literal(reflexivityAxiom));
        reflexivityClause = KB.standardizeApart(reflexivityClause);
        reflexivityClause.setStandardizedApartCheckNotRequired();
        usable.Add(reflexivityClause);
    }

    Sentence notAlpha = new NotSentence(alpha);
    // Want to use an answer literal to pull
    // query variables where necessary
    Literal answerLiteral = KB.createAnswerLiteral(notAlpha);
    ISet<Variable> answerLiteralVariables = KB.collectAllVariables(answerLiteral.getAtomicSentence());
    Clause answerClause = new Clause();

    if (answerLiteralVariables.Size() > 0)
    {
        // Query has variables: disjoin the answer literal onto the negated query so
        // bindings survive resolution.
        Sentence notAlphaWithAnswer = new ConnectedSentence(Connectors.OR, notAlpha,
            answerLiteral.getAtomicSentence());
        foreach (Clause cIter in KB.convertToClauses(notAlphaWithAnswer))
        {
            Clause c = cIter;
            c = KB.standardizeApart(c);
            c.setProofStep(new ProofStepGoal(c));
            c.setStandardizedApartCheckNotRequired();
            sos.AddAll(c.getFactors());
        }

        answerClause.addLiteral(answerLiteral);
    }
    else
    {
        // Ground query: the negated query alone seeds the set of support.
        foreach (Clause cIter in KB.convertToClauses(notAlpha))
        {
            Clause c = cIter;
            c = KB.standardizeApart(c);
            c.setProofStep(new ProofStepGoal(c));
            c.setStandardizedApartCheckNotRequired();
            sos.AddAll(c.getFactors());
        }
    }

    // Ensure all subsumed clauses are removed
    usable.RemoveAll(SubsumptionElimination.findSubsumedClauses(usable));
    sos.RemoveAll(SubsumptionElimination.findSubsumedClauses(sos));

    OTTERAnswerHandler ansHandler = new OTTERAnswerHandler(answerLiteral,
        answerLiteralVariables, answerClause, maxQueryTime);

    IndexedClauses idxdClauses = new IndexedClauses(
        getLightestClauseHeuristic(), sos, usable);

    return(otter(ansHandler, idxdClauses, sos, usable));
}