public void Apply(TKey key, TWindow window, IEnumerable<TElement> elements, ICollector<TElement> output)
{
    foreach (var element in elements)
    {
        output.Collect(element);
    }
}
private void CollectHit(ICollector collector, ICollector[] sidewaysCollectors, ICollector[] sidewaysCollectors2)
{
    //if (DEBUG) {
    //  System.out.println("      hit");
    //}

    collector.Collect(collectDocID);
    if (drillDownCollector != null)
    {
        drillDownCollector.Collect(collectDocID);
    }

    // TODO: we could "fix" faceting of the sideways counts
    // to do this "union" (of the drill down hits) in the
    // end instead:

    // Tally sideways counts:
    for (int i = 0; i < sidewaysCollectors.Length; i++)
    {
        sidewaysCollectors[i].Collect(collectDocID);
    }
    for (int i = 0; i < sidewaysCollectors2.Length; i++)
    {
        sidewaysCollectors2[i].Collect(collectDocID);
    }
}
public override void Replay(ICollector other)
{
    ReplayInit(other);
    int curUpto = 0;
    int curbase = 0;
    int chunkUpto = 0;
    m_curDocs = EMPTY_INT32_ARRAY;
    foreach (SegStart seg in m_cachedSegs)
    {
        other.SetNextReader(seg.ReaderContext);
        while (curbase + curUpto < seg.End)
        {
            if (curUpto == m_curDocs.Length)
            {
                curbase += m_curDocs.Length;
                m_curDocs = m_cachedDocs[chunkUpto];
                chunkUpto++;
                curUpto = 0;
            }
            other.Collect(m_curDocs[curUpto++]);
        }
    }
}
private void CollectNearMiss(ICollector sidewaysCollector)
{
    //if (DEBUG) {
    //  System.out.println("      missingDim=" + dim);
    //}
    sidewaysCollector.Collect(collectDocID);
}
public override bool Score(ICollector collector, int maxDoc)
{
    // the normalization trick already applies the boost of this query,
    // so we can use the wrapped scorer directly:
    collector.SetScorer(scorer);
    if (scorer.DocID == -1)
    {
        scorer.NextDoc();
    }
    while (true)
    {
        int scorerDoc = scorer.DocID;
        if (scorerDoc < maxDoc)
        {
            if (filterBits.Get(scorerDoc))
            {
                collector.Collect(scorerDoc);
            }
            scorer.NextDoc();
        }
        else
        {
            break;
        }
    }
    return scorer.DocID != Scorer.NO_MORE_DOCS;
}
public override void FlatMap(Event @event, ICollector<Alert> output)
{
    // get the current state for the key (source address);
    // if no state exists yet, use the state machine's initial state
    var state = _currentState.Value ?? State.Initial;

    // ask the state machine what state we should go to based on the given event type
    var nextState = state.Transition(@event.Type);

    if (nextState == State.InvalidTransition)
    {
        // the current event resulted in an invalid transition
        // raise an alert!
        output.Collect(new Alert(@event.SourceAddress, state, @event.Type));
    }
    else if (nextState.IsTerminal)
    {
        // we reached a terminal state, clean up the current state
        _currentState.Clear();
    }
    else
    {
        // remember the new state
        _currentState.Value = nextState;
    }
}
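The FlatMap above assumes a State type that exposes Transition and IsTerminal; a minimal sketch of what such a type could look like, with purely illustrative states and event types that are not taken from the original project:

// Hypothetical sketch of the state machine consumed by the FlatMap above.
// States, event types and transitions are illustrative only.
public enum EventType { Connect, Authenticate, Disconnect }

public sealed class State
{
    public static readonly State Initial = new State("Initial", isTerminal: false);
    public static readonly State InvalidTransition = new State("InvalidTransition", isTerminal: false);
    public static readonly State Connected = new State("Connected", isTerminal: false);
    public static readonly State Closed = new State("Closed", isTerminal: true);

    private readonly string _name;

    private State(string name, bool isTerminal)
    {
        _name = name;
        IsTerminal = isTerminal;
    }

    public bool IsTerminal { get; }

    // Returns the next state for a given event type, or InvalidTransition
    // when the event is not allowed in the current state.
    public State Transition(EventType type)
    {
        if (this == Initial && type == EventType.Connect) return Connected;
        if (this == Connected && type == EventType.Disconnect) return Closed;
        return InvalidTransition;
    }

    public override string ToString() => _name;
}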
public virtual void Collect(int doc)
{
    // only forward the doc when the wrapped scorer gives it a positive score
    if (scorer.GetScore() > 0)
    {
        c.Collect(doc);
    }
}
public virtual void Collect(int doc)
{
    m_in.Collect(doc);
    if (++numCollected >= m_segmentTotalCollect)
    {
        // stop collecting in this segment once the per-segment limit is reached
        throw new CollectionTerminatedException();
    }
}
public void FlatMap(string sentence, ICollector<WordWithCount> output)
{
    // split on whitespace; "\\s" is a Java regex idiom that string.Split would
    // treat as a literal separator, so split on whitespace characters instead
    var words = sentence.Split((char[])null, StringSplitOptions.RemoveEmptyEntries);
    foreach (var word in words)
    {
        output.Collect(new WordWithCount(word, 1L));
    }
}
public void FlatMap(string sentence, ICollector<Tuple<string, int>> output)
{
    var words = sentence.Split(' ');
    foreach (var word in words)
    {
        output.Collect(new Tuple<string, int>(word, 1));
    }
}
internal static bool ScoreRange(ICollector collector, Scorer scorer, int currentDoc, int end)
{
    while (currentDoc < end)
    {
        collector.Collect(currentDoc);
        currentDoc = scorer.NextDoc();
    }
    return currentDoc != DocIdSetIterator.NO_MORE_DOCS;
}
public virtual void Collect(int doc)
{
    if (inOrder || !AcceptsDocsOutOfOrder)
    {
        Debug.Assert(doc > lastCollected, "Out of order : " + lastCollected + " " + doc);
    }
    @in.Collect(doc);
    lastCollected = doc;
}
internal static void ScoreAll(ICollector collector, Scorer scorer)
{
    int doc;
    while ((doc = scorer.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
    {
        collector.Collect(doc);
    }
}
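ScoreRange and ScoreAll (both shown above) are the two halves of a typical bulk-scoring loop: one bounded by a maximum doc ID, one that drains the scorer. A hedged sketch of how a bulk scorer might dispatch between them; the surrounding method and fields are hypothetical, only the two helpers come from the snippets above:

// Hypothetical dispatch between the ScoreRange/ScoreAll helpers above.
public bool Score(ICollector collector, int max)
{
    collector.SetScorer(scorer);

    if (max == DocIdSetIterator.NO_MORE_DOCS)
    {
        // unbounded request: collect every remaining doc
        ScoreAll(collector, scorer);
        return false;
    }

    // bounded request: score up to (but excluding) max and
    // report whether more docs remain for a later call
    int doc = scorer.DocID;
    if (doc == -1)
    {
        doc = scorer.NextDoc();
    }
    return ScoreRange(collector, scorer, doc, max);
}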
private void OnCollisionEnter2D(Collision2D collision)
{
    IPickup pickup = collision.rigidbody?.GetComponent<IPickup>();
    if (pickup != null)
    {
        m_starCollector.Collect(pickup);
    }
}
public void Move(Direction direction)
{
    int currentValue = _coinContainer.CoinCount;
    if (_previousValue > currentValue)
    {
        // the coin count dropped since the last check, so notify the collector
        _previousValue = currentValue;
        _collector.Collect();
    }
}
public void Flush()
{
    Shuffle();
    for (int i = 0; i < buffered; ++i)
    {
        fakeScorer.doc = docIDs[i];
        fakeScorer.freq = freqs[i];
        fakeScorer.score = scores[i];
        @in.Collect(fakeScorer.DocID);
    }
    buffered = 0;
}
public virtual void Collect(int doc)
{
    if (inOrder || !AcceptsDocsOutOfOrder)
    {
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(doc > lastCollected, "Out of order : {0} {1}", lastCollected, doc);
        }
    }
    @in.Collect(doc);
    lastCollected = doc;
}
public override bool Score(ICollector c, int maxDoc)
{
    Debug.Assert(doc == -1);
    doc = 3000;
    FakeScorer fs = new FakeScorer();
    fs.doc = doc;
    fs.score = 1.0f;
    c.SetScorer(fs);
    c.Collect(3000);
    return false;
}
/// <summary>
/// Resolve data from a list of data sources; these
/// could be from various locations or the same.
/// </summary>
/// <param name="sources">All data sources to load.</param>
/// <returns>A list of all resolved data from the data sources.</returns>
public List<ResolvedData> Resolve(List<DataSource> sources)
{
    IEnumerable<ResolvedData> data = new List<ResolvedData>();
    foreach (DataSource source in sources)
    {
        ICollector collector = ResolveCollector(source.Collector);
        List<ResolvedData> resolvedData = collector.Collect(source.Source);
        data = data.Concat(resolvedData);
    }
    return data.ToList();
}
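A minimal caller sketch for the Resolve method above. The Collector and Source property names come from the snippet, but the resolver instance, the initializer values and the assumption that the properties are settable strings are all hypothetical:

// Hypothetical usage of Resolve; values below are illustrative only.
var sources = new List<DataSource>
{
    new DataSource { Collector = "file", Source = "data/items.json" },
    new DataSource { Collector = "http", Source = "https://example.com/feed" }
};

List<ResolvedData> allData = resolver.Resolve(sources);
Console.WriteLine($"Resolved {allData.Count} records from {sources.Count} sources.");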
public virtual void Collect(int doc)
{
    IDictionary<Query, float?> freqs = new Dictionary<Query, float?>();
    foreach (KeyValuePair<Query, Scorer> ent in SubScorers)
    {
        Scorer value = ent.Value;
        int matchId = value.DocID;
        freqs[ent.Key] = matchId == doc ? value.Freq : 0.0f;
    }
    DocCounts[doc + DocBase] = freqs;
    Other.Collect(doc);
}
private void TryCollectMetrics()
{
    _systeUsageCollector?.Collect();

    if (_metricCollectorTypes is null)
    {
        return;
    }

    foreach (var type in _metricCollectorTypes)
    {
        var collector = (ICollector)(_serviceProvider.GetService(type)
            ?? throw new Exception($"Metric collector of type {type.FullName} has not been registered in DI"));
        collector.Collect();
    }
}
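TryCollectMetrics resolves each collector type from the container, so every type listed must actually be registered. A hedged sketch of that registration with Microsoft.Extensions.DependencyInjection; the ICollector interface here is a parameterless stand-in for the project's own contract, and the concrete collector classes are illustrative names only:

// Hypothetical DI setup for the metric-collector loop above.
using System;
using Microsoft.Extensions.DependencyInjection;

public interface ICollector { void Collect(); }

public sealed class CpuMetricCollector : ICollector
{
    public void Collect() { /* sample CPU usage here */ }
}

public sealed class GcMetricCollector : ICollector
{
    public void Collect() { /* sample GC counters here */ }
}

public static class MetricsSetup
{
    public static (IServiceProvider Provider, Type[] CollectorTypes) Build()
    {
        var services = new ServiceCollection();
        services.AddSingleton<CpuMetricCollector>();
        services.AddSingleton<GcMetricCollector>();

        // The types handed to TryCollectMetrics must match these registrations,
        // otherwise GetService(type) returns null and the method throws.
        var collectorTypes = new[] { typeof(CpuMetricCollector), typeof(GcMetricCollector) };
        return (services.BuildServiceProvider(), collectorTypes);
    }
}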
// POST: api/Crawl
public IHttpActionResult Post([FromBody] FormData data)
{
    try
    {
        Console.WriteLine($"POST: data: {data}");

        // I was planning to build a factory over Collectors so you could pass it a location and
        // it would figure out if it was a webpage, a file etc. and give you the appropriate collector
        // to collect and parse the data. Sadly just ran out of time (wife is looking very angry right now!)
        var words = _collector.Collect(data.url);

        // Save the words to the DB. For performance this will use SqlBulkCopy to a temporary table; I didn't get a chance to
        // add a TVP onto it, if I did that would be as fast as we can reasonably go without resorting to elaborate solutions.
        // Once loaded into the temp table an upsert / blind update occurs from the temporary to the main table in one transaction:
        // existing records' counts are updated by adding on the count from the temporary table, and for any new words not
        // previously known about a record is inserted.
        //
        // The reason why this project is .NET 4.7 rather than Core (which admittedly would have been far easier) is that I wanted
        // to use the SQL Always Encrypted feature. If not familiar, it encrypts in the application without the app knowing, other than
        // the SQL connection having "Always Encrypted = true". This encryption is asymmetric and it works nicely with Azure Vault.
        //
        // I additionally decided to go for an EF Code First approach; I'm not a fan of SQL littering the domain as it's a maintenance
        // nightmare and generally can trip you up as you adjust your model. Making the Code First approach work with Always
        // Encrypted requires manual DB migrations, see CountVonCountMigrateDbInitializer.cs. This is nice as it can do an initial
        // generation of the DB and then you as a dev have control over how you want to tune your DB; it's close to what I've used in a number
        // of companies and seems to work fairly well.
        //
        // Apologies for the EntityFrameworkRepository, I was having a bit too much fun and started building a completely generic
        // repository capable of handling any entity. I soon realised some poor soul has to review this without me there to explain, so I
        // reined it in. I'm particularly proud of the Get method on that repo; it's very flexible, allowing most kinds of searches to be
        // specified by the caller rather than necessitating an interface change or giving away the crown jewels (direct DB access).
        _repository.Save(words);

        return Ok(data.url);
    }
    catch (Exception e)
    {
        // Need to add a proper logger; planning to use PostSharp, really like the aspect-oriented nature of the library
        Console.WriteLine($@"Failed : {e.Message}");
        return InternalServerError(e);
    }
}
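The comments above describe a bulk-load-then-upsert pattern: SqlBulkCopy into a temporary table, then a single statement that increments existing counts and inserts new words. A minimal sketch of that idea, assuming a simple Words(Word, Count) schema and a word/count dictionary; the table, column and method names are hypothetical and not taken from the original repository:

// Hypothetical sketch of the SqlBulkCopy + temp-table upsert described above.
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;

public static class WordCountStore
{
    public static void SaveWordCounts(string connectionString, IDictionary<string, int> wordCounts)
    {
        using (var connection = new SqlConnection(connectionString))
        {
            connection.Open();

            // 1. Stage the counts in a session-scoped temporary table.
            using (var create = new SqlCommand(
                "CREATE TABLE #WordCounts (Word NVARCHAR(200) NOT NULL, [Count] INT NOT NULL)", connection))
            {
                create.ExecuteNonQuery();
            }

            var table = new DataTable();
            table.Columns.Add("Word", typeof(string));
            table.Columns.Add("Count", typeof(int));
            foreach (var pair in wordCounts)
            {
                table.Rows.Add(pair.Key, pair.Value);
            }

            using (var bulk = new SqlBulkCopy(connection) { DestinationTableName = "#WordCounts" })
            {
                bulk.WriteToServer(table);
            }

            // 2. Upsert from the temp table into the main table in one statement:
            //    existing words get their counts incremented, new words are inserted.
            const string upsert = @"
                MERGE dbo.Words AS target
                USING #WordCounts AS source ON target.Word = source.Word
                WHEN MATCHED THEN UPDATE SET target.[Count] = target.[Count] + source.[Count]
                WHEN NOT MATCHED THEN INSERT (Word, [Count]) VALUES (source.Word, source.[Count]);";
            using (var merge = new SqlCommand(upsert, connection))
            {
                merge.ExecuteNonQuery();
            }
        }
    }
}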
public override bool Score(ICollector collector, int max)
{
    FakeScorer fakeScorer = new FakeScorer();
    collector.SetScorer(fakeScorer);
    if (_doc == -1)
    {
        _doc = NextDocOutOfOrder();
    }
    while (_doc < max)
    {
        fakeScorer.doc = _doc;
        fakeScorer._score = outerInstance._scores[outerInstance._ords[_scoreUpto]];
        collector.Collect(_doc);
        _doc = NextDocOutOfOrder();
    }
    return _doc != DocIdSetIterator.NO_MORE_DOCS;
}
private List<Peg> TryToProcessInput()
{
    var processedInput = new List<Peg>();
    try
    {
        _display.Display(ClientConstants.GuessesLeft + _game.RemainingTurns + ClientConstants.PromptGuess);
        var input = _collector.Collect();
        processedInput = _processor.Process(input);
        _collectingInputIsComplete = true;
    }
    catch (Exception e)
    {
        _display.DisplayError(e.Message);
    }
    return processedInput;
}
public virtual void TestCollector()
{
    // Tests that the collector delegates calls to input collectors properly.

    // Tests that the collector handles some null collectors well. If it
    // doesn't, an NPE would be thrown.
    DummyCollector[] dcs = new DummyCollector[] { new DummyCollector(), new DummyCollector() };
    ICollector c = MultiCollector.Wrap(dcs);
    Assert.IsTrue(c.AcceptsDocsOutOfOrder);
    c.Collect(1);
    c.SetNextReader(null);
    c.SetScorer(null);

    foreach (DummyCollector dc in dcs)
    {
        Assert.IsTrue(dc.AcceptsDocsOutOfOrderCalled);
        Assert.IsTrue(dc.CollectCalled);
        Assert.IsTrue(dc.SetNextReaderCalled);
        Assert.IsTrue(dc.SetScorerCalled);
    }
}
public virtual void TestNullCollectors()
{
    // Tests that the collector rejects all null collectors.
    try
    {
        MultiCollector.Wrap(null, null);
        Assert.Fail("only null collectors should not be supported");
    }
    catch (Exception e) when (e.IsIllegalArgumentException())
    {
        // expected
    }

    // Tests that the collector handles some null collectors well. If it
    // doesn't, an NPE would be thrown.
    ICollector c = MultiCollector.Wrap(new DummyCollector(), null, new DummyCollector());
    Assert.IsTrue(c is MultiCollector);
    Assert.IsTrue(c.AcceptsDocsOutOfOrder);
    c.Collect(1);
    c.SetNextReader(null);
    c.SetScorer(null);
}
public virtual void Collect(int doc) { collector.Collect(doc); }
public override bool Score(ICollector collector, int max)
{
    collector.SetScorer(new FakeScorer());
    collector.Collect(0);
    return false;
}
/// <summary>
/// Calculates the modified unigram precision score as described in section 2.1 of the paper.
/// </summary>
/// <param name="reference">The reference as a collector.</param>
/// <param name="candidate">The candidate as a collector.</param>
/// <returns>The modified n-gram precision score.</returns>
public double ModifiedUnigramPrecision(ICollector<string> reference, ICollector<string> candidate)
{
    var referenceDist = new FrequencyDistribution<string>(reference.Collect());
    return referenceDist.MostFrequentValue() / (double)candidate.Size();
}
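Assuming "the paper" referenced here is the BLEU paper (Papineni et al., 2002), the modified unigram precision of its section 2.1 clips each candidate word's count by its count in the reference before dividing by the candidate length:

\[
p_1 = \frac{\sum_{w \in \text{candidate}} \min\bigl(\mathrm{Count}_{\text{cand}}(w),\ \mathrm{Count}_{\text{ref}}(w)\bigr)}{\sum_{w \in \text{candidate}} \mathrm{Count}_{\text{cand}}(w)}
\]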
public override bool Score(ICollector collector, int max)
{
    bool more;
    Bucket tmp;
    FakeScorer fs = new FakeScorer();

    // The internal loop will set the score and doc before calling collect.
    collector.SetScorer(fs);
    do
    {
        bucketTable.first = null;

        while (current != null) // more queued
        {
            // check prohibited & required
            if ((current.Bits & PROHIBITED_MASK) == 0)
            {
                // TODO: re-enable this if BQ ever sends us required
                // clauses
                //&& (current.bits & requiredMask) == requiredMask) {

                // NOTE: Lucene always passes max =
                // Integer.MAX_VALUE today, because we never embed
                // a BooleanScorer inside another (even though
                // that should work)... but in theory an outside
                // app could pass a different max so we must check
                // it:
                if (current.Doc >= max)
                {
                    tmp = current;
                    current = current.Next;
                    tmp.Next = bucketTable.first;
                    bucketTable.first = tmp;
                    continue;
                }

                if (current.Coord >= minNrShouldMatch)
                {
                    fs.score = (float)(current.Score * coordFactors[current.Coord]);
                    fs.doc = current.Doc;
                    fs.freq = current.Coord;
                    collector.Collect(current.Doc);
                }
            }

            current = current.Next; // pop the queue
        }

        if (bucketTable.first != null)
        {
            current = bucketTable.first;
            bucketTable.first = current.Next;
            return true;
        }

        // refill the queue
        more = false;
        end += BucketTable.SIZE;
        for (SubScorer sub = scorers; sub != null; sub = sub.Next)
        {
            if (sub.More)
            {
                sub.More = sub.Scorer.Score(sub.Collector, end);
                more |= sub.More;
            }
        }
        current = bucketTable.first;
    } while (current != null || more);

    return false;
}