/// <summary>Scores and collects every matching document.</summary>
/// <param name="collector">Receives each matching document id via <c>Collect</c>.</param>
public virtual void Score(Collector collector)
{
    collector.SetScorer(this);
    // Walk the iterator to exhaustion, handing each hit to the collector.
    for (int doc = NextDoc(); doc != NO_MORE_DOCS; doc = NextDoc())
    {
        collector.Collect(doc);
    }
}
public void Can_collect_numbers()
{
    var dir = CreateDir();
    // CreateDirectory is a no-op when the directory already exists.
    Directory.CreateDirectory(dir);

    var docs = new List<dynamic>
    {
        new { _id = "0", title = 5 },
        new { _id = "1", title = 4 },
        new { _id = "2", title = 3 },
        new { _id = "3", title = 2 },
        new { _id = "4", title = 1 },
        new { _id = "5", title = 0 }
    }.ToDocuments(primaryKeyFieldName: "_id");

    long version;
    // FIX: wrap the transaction in a using block so it is disposed even if
    // Write() throws (previously a manual Dispose() after Write()).
    using (var writer = new FullTextUpsertTransaction(dir, new Analyzer(), compression: Compression.Lz, documents: docs))
    {
        version = writer.Write();
    }

    // Exact numeric match: only doc 2 has title == 3.
    var query = new QueryParser().Parse("title:3");
    using (var readSession = CreateReadSession(dir, version))
    using (var collector = new Collector(readSession))
    {
        var scores = collector.Collect(query).ToList();

        Assert.AreEqual(1, scores.Count);
        Assert.IsTrue(scores.Any(d => d.DocumentId == 2));
    }

    // Range query; the assertions below show both bounds behave inclusively.
    //query = new List<QueryContext>{new QueryContext("title", 0, 3) };
    query = new QueryParser().Parse("title<3+title>0");
    using (var readSession = CreateReadSession(dir, version))
    using (var collector = new Collector(readSession))
    {
        var scores = collector.Collect(query).ToList();

        Assert.AreEqual(4, scores.Count);
        Assert.IsTrue(scores.Any(d => d.DocumentId == 5));
        Assert.IsTrue(scores.Any(d => d.DocumentId == 4));
        Assert.IsTrue(scores.Any(d => d.DocumentId == 3));
        Assert.IsTrue(scores.Any(d => d.DocumentId == 2));
    }
}
public void Can_rank_near_term()
{
    var dir = Path.Combine(Setup.Dir, "Can_rank_near_term");
    Directory.CreateDirectory(dir); // no-op when it already exists

    var documents = new List<Dictionary<string, string>>
    {
        new Dictionary<string, string> { ["_id"] = "0", ["title"] = "Gustav Horn, Count of Pori" },
        new Dictionary<string, string> { ["_id"] = "1", ["title"] = "Port au Port Peninsula" },
        new Dictionary<string, string> { ["_id"] = "2", ["title"] = "Pore" },
        new Dictionary<string, string> { ["_id"] = "3", ["title"] = "Born 2.0" },
        new Dictionary<string, string> { ["_id"] = "4", ["title"] = "P**n" }
    };

    string indexName;
    using (var writer = new StreamWriteOperation(dir, new Analyzer(), documents.ToStream()))
    {
        indexName = writer.Execute();
    }

    // Near-term (fuzzy) query: all five titles match, ranked by closeness to "p**n".
    var query = new QueryParser(new Analyzer()).Parse("+title:p**n~");

    using (var collector = new Collector(dir, IxInfo.Load(Path.Combine(dir, indexName + ".ix")), new Tfidf()))
    {
        var hits = collector.Collect(query).ToList();

        Assert.That(hits.Count, Is.EqualTo(5));
        Assert.IsTrue(hits[0].DocumentId.Equals(4));
        Assert.IsTrue(hits[1].DocumentId.Equals(0));
        Assert.IsTrue(hits[2].DocumentId.Equals(1));
        Assert.IsTrue(hits[3].DocumentId.Equals(3));
        Assert.IsTrue(hits[4].DocumentId.Equals(2));
    }
}
public void Can_collect_prefixed()
{
    var dir = Path.Combine(Setup.Dir, "Can_collect_prefixed");
    Directory.CreateDirectory(dir); // no-op when it already exists

    var documents = new List<Dictionary<string, string>>
    {
        new Dictionary<string, string> { ["_id"] = "0", ["title"] = "rambo" },
        new Dictionary<string, string> { ["_id"] = "1", ["title"] = "rambo 2" },
        new Dictionary<string, string> { ["_id"] = "2", ["title"] = "rocky 2" },
        new Dictionary<string, string> { ["_id"] = "3", ["title"] = "raiders of the lost ark" },
        new Dictionary<string, string> { ["_id"] = "4", ["title"] = "rain man" }
    };

    string indexName;
    using (var writer = new StreamWriteOperation(dir, new Analyzer(), documents.ToStream()))
    {
        indexName = writer.Execute();
    }

    using (var collector = new Collector(dir, IxInfo.Load(Path.Combine(dir, indexName + ".ix")), new Tfidf()))
    {
        // Prefix search on "ra" matches every title except "rocky 2".
        var hits = collector.Collect(new QueryContext("title", "ra") { Prefix = true }).ToList();

        Assert.That(hits.Count, Is.EqualTo(4));
        Assert.IsTrue(hits.Any(h => h.DocumentId == 0));
        Assert.IsTrue(hits.Any(h => h.DocumentId == 1));
        Assert.IsTrue(hits.Any(h => h.DocumentId == 3));
        Assert.IsTrue(hits.Any(h => h.DocumentId == 4));
    }
}
public void Can_collect_exact_phrase_joined_by_and()
{
    var dir = CreateDir();
    // CreateDirectory is a no-op when the directory already exists.
    Directory.CreateDirectory(dir);

    var docs = new List<dynamic>
    {
        new { _id = "0", title = "rambo first blood" },
        new { _id = "1", title = "rambo 2" },
        new { _id = "2", title = "rocky 2" },
        new { _id = "3", title = "raiders of the lost ark" },
        new { _id = "4", title = "the rain man" },
        new { _id = "5", title = "the good, the bad and the ugly" }
    }.ToDocuments(primaryKeyFieldName: "_id");

    long version;
    // FIX: wrap the transaction in a using block so it is disposed even if
    // Write() throws (previously a manual Dispose() after Write()).
    using (var writer = new UpsertTransaction(dir, new Analyzer(), compression: Compression.Lz, documents: docs))
    {
        version = writer.Write();
    }

    // Single required term: three titles contain "the".
    var query = new QueryParser(new Analyzer()).Parse("+title:the");
    using (var readSession = CreateReadSession(dir, version))
    using (var collector = new Collector(dir, readSession))
    {
        var scores = collector.Collect(query).ToList();

        Assert.AreEqual(3, scores.Count);
        Assert.IsTrue(scores.Any(d => d.DocumentId == 3));
        Assert.IsTrue(scores.Any(d => d.DocumentId == 4));
        Assert.IsTrue(scores.Any(d => d.DocumentId == 5));
    }

    // AND of two required terms narrows to the single "ugly" document.
    query = new QueryParser(new Analyzer()).Parse("+title:the +title:ugly");
    using (var readSession = CreateReadSession(dir, version))
    using (var collector = new Collector(dir, readSession))
    {
        var scores = collector.Collect(query).ToList();

        Assert.AreEqual(1, scores.Count);
        Assert.IsTrue(scores.Any(d => d.DocumentId == 5));
    }
}
public void PerformanceTest()
{
    // Smoke test: running the collector for one second while four cores are
    // artificially loaded must complete without throwing. No value assertions.
    var collector = new Collector("");
    var time = TimeSpan.FromSeconds(1);
    var tweaker = new Tweaker("");
    tweaker.Tweak(TweakerMode.ConsumeCores, 4);
    var result = collector.Collect(CollectorMode.Performance, time);
    // FIX: removed Debugger.Break() — it suspends automated test runs whenever
    // a debugger is attached and asserts nothing; "should not crash" is the test.
}
/// <summary>
/// Kicks off a background scrape over the configured page range, optionally
/// combining each collected name with candidate passwords, and writes the
/// results to a timestamped file.
/// </summary>
private void StartButton_OnClick(object s, RoutedEventArgs e)
{
    var bw = new BackgroundWorker();
    bw.DoWork += delegate
    {
        StartProgress = 0;
        StartMaximum = Config.Collector.PageTo - Config.Collector.PageFrom;
        Application.Current.Dispatcher.Invoke(
            () => this.OnProgressStart(this.StartButton, () => this.StartProgress));
        var combo = new Combo(Config.Passwords);
        var result = new List<string>();
        // FIX: List<string> is not thread-safe; the Parallel.For body below runs
        // concurrently, so all writes to `result` must be serialized.
        var resultGate = new object();
        var region = Utility.LongRegionToShort(Config.Collector.Region);
        if (region != null)
        {
            Parallel.For(Config.Collector.PageFrom, Config.Collector.PageTo, i =>
            {
                try
                {
                    if (Config.Collector.AddPassword)
                    {
                        var tmp = Collector.Collect(region, i, Config.Collector.FilterSpecialCharacters);
                        // Materialize outside the lock to keep the critical section short.
                        var lines = (from name in tmp
                                     let cbo = combo.Create(name)
                                     from c in cbo
                                     select name + ":" + c).ToList();
                        lock (resultGate)
                        {
                            result.AddRange(lines);
                        }
                    }
                    else
                    {
                        var lines = Collector.Collect(region, i, Config.Collector.FilterSpecialCharacters);
                        lock (resultGate)
                        {
                            result.AddRange(lines);
                        }
                    }
                }
                catch
                {
                    // Best effort: a failed page is skipped; progress still advances below.
                }
                Application.Current.Dispatcher.Invoke(() => this.StartProgress++);
            });
            File.WriteAllLines(
                string.Format("accounts{1}-{0:yyyy-MM-dd_hh-mm-ss-tt}_{2}.txt",
                    DateTime.Now,
                    Config.Collector.AddPassword ? "+pw" : string.Empty,
                    result.Count),
                result);
        }
    };
    bw.RunWorkerCompleted += (sender, args) => { OnProgressFinish(StartButton, "Start"); };
    bw.RunWorkerAsync();
}
public void CollectTest1()
{
    // Run a trace-gather with the version check enabled and verify the
    // release-version check is logged.
    var collector = new Collector();
    var options = new ConfigurationOptions
    {
        VersionOption = true,
        GatherType = FileTypesEnum.trace.ToString()
    };

    Log.MessageLogged += Log_MessageLogged;
    options.Validate();

    int result = collector.Collect(options);
    string logged = string.Join<string>(Environment.NewLine, messages.ToArray());
    collector.Close();

    Assert.IsTrue(logged.Contains("CheckReleaseVersion:"));
    messages.Clear();
}
public void Can_rank_near_phrase()
{
    var dir = Path.Combine(Setup.Dir, "Can_rank_near_phrase");
    Directory.CreateDirectory(dir); // no-op when it already exists

    var documents = new List<Dictionary<string, string>>
    {
        new Dictionary<string, string> { ["_id"] = "0", ["title"] = "Tage Mage" },
        new Dictionary<string, string> { ["_id"] = "1", ["title"] = "aye-aye" },
        new Dictionary<string, string> { ["_id"] = "2", ["title"] = "Cage Rage Championships" },
        new Dictionary<string, string> { ["_id"] = "3", ["title"] = "Page Up and Page Down keys" },
        new Dictionary<string, string> { ["_id"] = "4", ["title"] = "Golden Age of P**n" }
    };

    string indexName;
    using (var writer = new StreamWriteOperation(dir, new Analyzer(), documents.ToStream()))
    {
        indexName = writer.Execute();
    }

    // Near-phrase query: every document matches, and the closest phrase ranks first.
    var query = new QueryParser(new Analyzer()).Parse("+title:age of p**n~");

    using (var collector = new Collector(dir, IxInfo.Load(Path.Combine(dir, indexName + ".ix")), new Tfidf()))
    {
        var hits = collector.Collect(query).ToList();

        Assert.That(hits.Count, Is.EqualTo(5));
        Assert.IsTrue(hits[0].DocumentId.Equals(4));
    }
}
public void Can_collect_prefixed()
{
    var dir = Path.Combine(Dir, "Can_collect_prefixed");
    Directory.CreateDirectory(dir); // no-op when it already exists

    var documents = new List<List<Field>>
    {
        new List<Field> { new Field("_id", "0"), new Field("title", "rambo") },
        new List<Field> { new Field("_id", "1"), new Field("title", "rambo 2") },
        new List<Field> { new Field("_id", "2"), new Field("title", "rocky 2") },
        new List<Field> { new Field("_id", "3"), new Field("title", "raiders of the lost ark") },
        new List<Field> { new Field("_id", "4"), new Field("title", "rain man") }
    };

    var upsert = new DocumentUpsertOperation(dir, new Analyzer(), compression: Compression.QuickLz, primaryKey: "_id", documents: documents);
    long indexName = upsert.Commit();

    using (var collector = new Collector(dir, IxInfo.Load(Path.Combine(dir, indexName + ".ix")), new Tfidf()))
    {
        // Prefix search on "ra" matches every title except "rocky 2".
        var hits = collector.Collect(new QueryContext("title", "ra") { Prefix = true }).ToList();

        Assert.AreEqual(4, hits.Count);
        Assert.IsTrue(hits.Any(h => h.DocumentId == 0));
        Assert.IsTrue(hits.Any(h => h.DocumentId == 1));
        Assert.IsTrue(hits.Any(h => h.DocumentId == 3));
        Assert.IsTrue(hits.Any(h => h.DocumentId == 4));
    }
}
public void Can_collect_exact_phrase_joined_by_not()
{
    var dir = Path.Combine(Dir, "Can_collect_exact_phrase_joined_by_not");
    Directory.CreateDirectory(dir); // no-op when it already exists

    // Flat field list grouped into one Document per document id.
    var fields = new List<Field>
    {
        new Field(0, "_id", "0"), new Field(0, "title", "rambo first blood"),
        new Field(1, "_id", "1"), new Field(1, "title", "rambo 2"),
        new Field(2, "_id", "2"), new Field(2, "title", "rocky 2"),
        new Field(3, "_id", "3"), new Field(3, "title", "raiders of the lost ark"),
        new Field(4, "_id", "4"), new Field(4, "title", "the rain man"),
        new Field(5, "_id", "5"), new Field(5, "title", "the good, the bad and the ugly")
    };
    var documents = fields.GroupBy(f => f.DocumentId).Select(g => new Document(g.Key, g.ToList()));

    var upsert = new DocumentUpsertOperation(dir, new Analyzer(), compression: Compression.Lz, primaryKey: "_id", documents: documents);
    long indexName = upsert.Commit();

    // Three titles contain "the".
    var query = new QueryParser(new Analyzer()).Parse("+title:the");
    using (var collector = new Collector(dir, IxInfo.Load(Path.Combine(dir, indexName + ".ix")), new Tfidf()))
    {
        var hits = collector.Collect(query).ToList();

        Assert.AreEqual(3, hits.Count);
        Assert.IsTrue(hits.Any(h => h.DocumentId == 3));
        Assert.IsTrue(hits.Any(h => h.DocumentId == 4));
        Assert.IsTrue(hits.Any(h => h.DocumentId == 5));
    }

    // The NOT clause removes the single "ugly" document from the matches.
    query = new QueryParser(new Analyzer()).Parse("+title:the -title:ugly");
    using (var collector = new Collector(dir, IxInfo.Load(Path.Combine(dir, indexName + ".ix")), new Tfidf()))
    {
        var hits = collector.Collect(query).ToList();

        Assert.AreEqual(2, hits.Count);
        Assert.IsTrue(hits.Any(h => h.DocumentId == 3));
        Assert.IsTrue(hits.Any(h => h.DocumentId == 4));
    }
}
public void Can_collect_exact_phrase_joined_by_not()
{
    var dir = Path.Combine(CreateDir(), "Can_collect_exact_phrase_joined_by_not");
    Directory.CreateDirectory(dir); // no-op when it already exists

    var documents = new List<dynamic>
    {
        new { _id = "0", title = "rambo first blood" },
        new { _id = "1", title = "rambo 2" },
        new { _id = "2", title = "rocky 2" },
        new { _id = "3", title = "raiders of the lost ark" },
        new { _id = "4", title = "the rain man" },
        new { _id = "5", title = "the good, the bad and the ugly" }
    }.ToDocuments();

    var upsert = new DocumentsUpsertOperation(dir, new Analyzer(), compression: Compression.Lz, primaryKey: "_id", documents: documents);
    long indexName = upsert.Commit();

    // Three titles contain "the".
    var query = new QueryParser(new Analyzer()).Parse("+title:the");
    using (var collector = new Collector(dir, IxInfo.Load(Path.Combine(dir, indexName + ".ix")), new Tfidf()))
    {
        var hits = collector.Collect(query).ToList();

        Assert.AreEqual(3, hits.Count);
        Assert.IsTrue(hits.Any(h => h.DocumentId == 3));
        Assert.IsTrue(hits.Any(h => h.DocumentId == 4));
        Assert.IsTrue(hits.Any(h => h.DocumentId == 5));
    }

    // The NOT clause removes the single "ugly" document from the matches.
    query = new QueryParser(new Analyzer()).Parse("+title:the -title:ugly");
    using (var collector = new Collector(dir, IxInfo.Load(Path.Combine(dir, indexName + ".ix")), new Tfidf()))
    {
        var hits = collector.Collect(query).ToList();

        Assert.AreEqual(2, hits.Count);
        Assert.IsTrue(hits.Any(h => h.DocumentId == 3));
        Assert.IsTrue(hits.Any(h => h.DocumentId == 4));
    }
}
/// <summary>
/// Find an element by ID, including or under this element.
/// </summary>
/// <param name="id">The ID to search for.</param>
/// <returns>The first matching element by ID, starting with this element, or null if none found.</returns>
/// <remarks>
/// This returns the first matching ID in document order starting from this element,
/// so searching from a different root may yield a different element. For a
/// document-unique lookup, use <seealso cref="Document.GetElementById(string)"/>.
/// </remarks>
public Element GetElementById(string id)
{
    if (string.IsNullOrEmpty(id))
    {
        throw new ArgumentNullException("id");
    }

    Elements matches = Collector.Collect(new Evaluator.Id(id), this);
    return matches.Count > 0 ? matches[0] : null;
}
public void Can_collect_near()
{
    var dir = Path.Combine(Dir, "Can_collect_near");
    Directory.CreateDirectory(dir); // no-op when it already exists

    // Flat field list grouped into one Document per document id.
    var fields = new List<Field>
    {
        new Field(0, "_id", "0"), new Field(0, "title", "rambo"),
        new Field(1, "_id", "1"), new Field(1, "title", "rambo 2"),
        new Field(2, "_id", "2"), new Field(2, "title", "rocky 2"),
        new Field(3, "_id", "3"), new Field(3, "title", "raiders of the lost ark"),
        new Field(4, "_id", "4"), new Field(4, "title", "tomb raider")
    };
    var documents = fields.GroupBy(f => f.DocumentId).Select(g => new Document(g.Key, g.ToList()));

    var upsert = new DocumentUpsertOperation(dir, new Analyzer(), compression: Compression.Lz, primaryKey: "_id", documents: documents);
    long indexName = upsert.Commit();

    // Exact term: only "tomb raider" contains the token "raider".
    using (var collector = new Collector(dir, IxInfo.Load(Path.Combine(dir, indexName + ".ix")), new Tfidf()))
    {
        var hits = collector.Collect(new QueryContext("title", "raider") { Fuzzy = false, Edits = 1 }).ToList();

        Assert.AreEqual(1, hits.Count);
        Assert.IsTrue(hits.Any(h => h.DocumentId == 4));
    }

    // With one edit allowed, "raiders" matches too.
    using (var collector = new Collector(dir, IxInfo.Load(Path.Combine(dir, indexName + ".ix")), new Tfidf()))
    {
        var hits = collector.Collect(new QueryContext("title", "raider") { Fuzzy = true, Edits = 1 }).ToList();

        Assert.AreEqual(2, hits.Count);
        Assert.IsTrue(hits.Any(h => h.DocumentId == 3));
        Assert.IsTrue(hits.Any(h => h.DocumentId == 4));
    }
}
public void Can_delete()
{
    var dir = CreateDir();
    var docs = new List<dynamic>
    {
        new { _id = "0", title = "rambo first blood" },
        new { _id = "1", title = "rambo 2" },
        new { _id = "2", title = "rocky 2" },
        new { _id = "3", title = "raiders of the lost ark" },
        new { _id = "4", title = "the rain man" },
        new { _id = "5", title = "the good, the bad and the ugly" }
    }.ToDocuments(primaryKeyFieldName: "_id");

    long version;
    // FIX: wrap the transaction in a using block so it is disposed even if
    // Write() throws (previously a manual Dispose() after Write()).
    using (var writer = new UpsertTransaction(dir, new Analyzer(), compression: Compression.Lz, documents: docs))
    {
        version = writer.Write();
    }

    // Both "rambo" titles are present before the delete.
    using (var factory = new ReadSessionFactory(dir))
    using (var readSession = factory.OpenReadSession(version))
    using (var collector = new Collector(dir, readSession))
    {
        var scores = collector.Collect(new QueryContext("title", "rambo")).ToList();

        Assert.AreEqual(2, scores.Count);
        Assert.IsTrue(scores.Any(d => d.DocumentId == 0));
        Assert.IsTrue(scores.Any(d => d.DocumentId == 1));
    }

    var operation = new DeleteByPrimaryKeyCommand(dir, new[] { "0" });
    operation.Execute();

    // Re-reading the same version no longer returns the deleted document.
    using (var factory = new ReadSessionFactory(dir))
    using (var readSession = factory.OpenReadSession(version))
    using (var collector = new Collector(dir, readSession))
    {
        var scores = collector.Collect(new QueryContext("title", "rambo")).ToList();

        Assert.AreEqual(1, scores.Count);
        Assert.IsTrue(scores.Any(d => d.DocumentId == 1));
    }
}
public void Can_collect_near()
{
    var dir = Path.Combine(CreateDir(), "Can_collect_near");
    Directory.CreateDirectory(dir); // no-op when it already exists

    var documents = new List<dynamic>
    {
        new { _id = "0", title = "rambo" },
        new { _id = "1", title = "rambo 2" },
        new { _id = "2", title = "rocky 2" },
        new { _id = "3", title = "raiders of the lost ark" },
        new { _id = "4", title = "tomb raider" }
    }.ToDocuments();

    var upsert = new DocumentsUpsertOperation(dir, new Analyzer(), compression: Compression.Lz, primaryKey: "_id", documents: documents);
    long indexName = upsert.Commit();

    // Exact term: only "tomb raider" contains the token "raider".
    using (var collector = new Collector(dir, IxInfo.Load(Path.Combine(dir, indexName + ".ix")), new Tfidf()))
    {
        var hits = collector.Collect(new QueryContext("title", "raider") { Fuzzy = false, Edits = 1 }).ToList();

        Assert.AreEqual(1, hits.Count);
        Assert.IsTrue(hits.Any(h => h.DocumentId == 4));
    }

    // With one edit allowed, "raiders" matches too.
    using (var collector = new Collector(dir, IxInfo.Load(Path.Combine(dir, indexName + ".ix")), new Tfidf()))
    {
        var hits = collector.Collect(new QueryContext("title", "raider") { Fuzzy = true, Edits = 1 }).ToList();

        Assert.AreEqual(2, hits.Count);
        Assert.IsTrue(hits.Any(h => h.DocumentId == 3));
        Assert.IsTrue(hits.Any(h => h.DocumentId == 4));
    }
}
// Replays pre-computed scores out of order: a FakeScorer is handed to the
// collector so it reads doc/_score from fields instead of re-scoring.
// Returns true if more documents may remain (i.e. iteration stopped at `max`
// rather than exhausting the iterator).
public override bool Score(Collector collector, int max)
{
    FakeScorer fakeScorer = new FakeScorer();
    collector.Scorer = fakeScorer;
    // _doc == -1 marks "not started yet"; advance to the first hit lazily.
    if (_doc == -1)
    {
        _doc = NextDocOutOfOrder();
    }
    while (_doc < max)
    {
        fakeScorer.doc = _doc;
        // NOTE(review): _scoreUpto is not advanced anywhere in this method —
        // presumably NextDocOutOfOrder() updates it; confirm before modifying.
        fakeScorer._score = outerInstance._scores[outerInstance._ords[_scoreUpto]];
        collector.Collect(_doc);
        _doc = NextDocOutOfOrder();
    }
    return (_doc != DocIdSetIterator.NO_MORE_DOCS);
}
public void Can_collect_near()
{
    var dir = CreateDir();
    var docs = new List<dynamic>
    {
        new { _id = "0", title = "rambo" },
        new { _id = "1", title = "rambo 2" },
        new { _id = "2", title = "rocky 2" },
        new { _id = "3", title = "raiders of the lost ark" },
        new { _id = "4", title = "tomb raider" }
    }.ToDocuments(primaryKeyFieldName: "_id");

    long version;
    // FIX: wrap the transaction in a using block so it is disposed even if
    // Write() throws (previously a manual Dispose() after Write()).
    using (var writer = new UpsertTransaction(dir, new Analyzer(), compression: Compression.Lz, documents: docs))
    {
        version = writer.Write();
    }

    // Exact term: only "tomb raider" contains the token "raider".
    using (var readSession = CreateReadSession(dir, version))
    using (var collector = new Collector(dir, readSession))
    {
        var scores = collector.Collect(new QueryContext("title", "raider") { Fuzzy = false, Edits = 1 }).ToList();

        Assert.AreEqual(1, scores.Count);
        Assert.IsTrue(scores.Any(d => d.DocumentId == 4));
    }

    // With one edit allowed, "raiders" matches too.
    using (var readSession = CreateReadSession(dir, version))
    using (var collector = new Collector(dir, readSession))
    {
        var scores = collector.Collect(new QueryContext("title", "raider") { Fuzzy = true, Edits = 1 }).ToList();

        Assert.AreEqual(2, scores.Count);
        Assert.IsTrue(scores.Any(d => d.DocumentId == 3));
        Assert.IsTrue(scores.Any(d => d.DocumentId == 4));
    }
}
/// <summary>Hands the configured loot to the collector and marks this object as looted.</summary>
private void Loot(Collector collector)
{
    if (animator != null)
    {
        animator.SetTrigger("Looted");
    }

    if (loot != null)
    {
        // Give the collector a fresh instance, then clear the reference so the
        // same loot can't be granted twice.
        var spawned = Instantiate(loot);
        collector.Collect(spawned);
        loot = null;
    }

    interactable.action = null;
    looted = true;

    if (destroyOnLoot)
    {
        Destroy(gameObject);
    }
}
public void Can_collect_exact()
{
    var dir = CreateDir();
    var docs = new List<dynamic>
    {
        new { _id = "0", title = "rambo first blood" },
        new { _id = "1", title = "rambo 2" },
        new { _id = "2", title = "rocky 2" },
        new { _id = "3", title = "raiders of the lost ark" },
        new { _id = "4", title = "the rain man" },
        new { _id = "5", title = "the good, the bad and the ugly" }
    }.ToDocuments(primaryKeyFieldName: "_id");

    long version;
    // FIX: wrap the transaction in a using block so it is disposed even if
    // Write() throws (previously a manual Dispose() after Write()).
    using (var writer = new FullTextUpsertTransaction(dir, new Analyzer(), compression: Compression.Lz, documents: docs))
    {
        version = writer.Write();
    }

    // Exact term: two titles contain "rambo".
    using (var readSession = CreateReadSession(dir, version))
    using (var collector = new Collector(readSession))
    {
        var query = new QueryParser().Parse("title:'rambo'");
        var scores = collector.Collect(query);

        Assert.AreEqual(2, scores.Count);
        Assert.IsTrue(scores.Any(d => d.DocumentId == 0));
        Assert.IsTrue(scores.Any(d => d.DocumentId == 1));
    }

    // Exact term: three titles contain "the".
    using (var readSession = CreateReadSession(dir, version))
    using (var collector = new Collector(readSession))
    {
        var query = new QueryParser().Parse("title:'the'");
        var scores = collector.Collect(query);

        Assert.AreEqual(3, scores.Count);
        Assert.IsTrue(scores.Any(d => d.DocumentId == 3));
        Assert.IsTrue(scores.Any(d => d.DocumentId == 4));
        Assert.IsTrue(scores.Any(d => d.DocumentId == 5));
    }
}
/// <summary>
/// Rebuilds and randomizes the play list whenever the directory text changes,
/// then fills the play-list box on a background task.
/// </summary>
private void DirectoryBox_TextChanged(object sender, EventArgs e)
{
    if (string.IsNullOrWhiteSpace(DirectoryBox.Text))
    {
        // FIX: assigning Text re-raises TextChanged; return here and let the
        // nested invocation handle the default path exactly once (previously
        // execution fell through and the whole handler ran twice).
        DirectoryBox.Text = @".\";
        return;
    }
    var collector = new Collector(DirectoryBox.Text);
    if (!collector.DirectoryInfo.Exists)
    {
        return;
    }
    collector.Collect();
    var list = collector.GetPlayList();
    list.Randomize();
    if (list.Count < 1)
    {
        return;
    }
    player.PlayList = list;
    // Populate the ListBox off the UI thread, marshalling each update back via Invoke.
    Task.Run(() =>
    {
        foreach (var item in player.PlayList.ToList())
        {
            PlayListBox.Invoke(new MethodInvoker(() => { PlayListBox.Items.Add($"{item.Key}. {item.Value.Name}"); }));
        }
        PlayListBox.Invoke(new MethodInvoker(() => { PlayListBox.SelectedItem = PlayListBox.Items[0]; }));
    });
}
public void Can_collect_near_phrase()
{
    var dir = CreateDir();
    var docs = new List<dynamic>
    {
        new { _id = "0", title = "rambo first blood" },
        new { _id = "1", title = "rambo 2" },
        new { _id = "2", title = "rocky 2" },
        new { _id = "3", title = "the raid" },
        new { _id = "4", title = "the rain man" },
        new { _id = "5", title = "the good, the bad and the ugly" }
    }.ToDocuments(primaryKeyFieldName: "_id");

    // FIX: wrap the transaction in a using block so it is disposed even if
    // Write() throws (previously a manual Dispose() after Write()). The local
    // is also renamed: Write() returns a version, used as the index file name.
    long version;
    using (var writer = new UpsertTransaction(dir, new Analyzer(), compression: Compression.Lz, documents: docs))
    {
        version = writer.Write();
    }

    // Exact phrase: only "the rain man" matches "rain man".
    var query = new QueryParser(new Analyzer()).Parse("+title:rain man");
    using (var collector = new Collector(dir, IxInfo.Load(Path.Combine(dir, version + ".ix")), new Tfidf()))
    {
        var scores = collector.Collect(query).ToList();

        Assert.AreEqual(1, scores.Count);
        Assert.IsTrue(scores.Any(d => d.DocumentId == 4));
    }

    // Near phrase with 0.75 similarity also matches "the raid".
    query = new QueryParser(new Analyzer(), 0.75f).Parse("+title:rain man~");
    using (var collector = new Collector(dir, IxInfo.Load(Path.Combine(dir, version + ".ix")), new Tfidf()))
    {
        var scores = collector.Collect(query).ToList();

        Assert.AreEqual(2, scores.Count);
        Assert.IsTrue(scores.Any(d => d.DocumentId == 3));
        Assert.IsTrue(scores.Any(d => d.DocumentId == 4));
    }
}
public void Can_collect_date_range()
{
    var dir = CreateDir();
    // CreateDirectory is a no-op when the directory already exists.
    Directory.CreateDirectory(dir);

    var lowerBound = DateTime.Now;
    var upperBound = DateTime.Now.AddDays(1);

    var docs = new List<dynamic>
    {
        new { _id = "0", created = DateTime.Now.AddDays(-1) },
        new { _id = "1", created = lowerBound },
        new { _id = "2", created = upperBound },
        new { _id = "3", created = upperBound.AddDays(1) },
        new { _id = "4", created = upperBound.AddDays(2) },
        new { _id = "5", created = upperBound.AddDays(3) }
    }.ToDocuments(primaryKeyFieldName: "_id");

    long version;
    // FIX: wrap the transaction in a using block so it is disposed even if
    // Write() throws (previously a manual Dispose() after Write()).
    using (var writer = new FullTextUpsertTransaction(dir, new Analyzer(), compression: Compression.Lz, documents: docs))
    {
        version = writer.Write();
    }

    // Range over the two bound documents; the assertions show bounds are inclusive.
    // NOTE(review): the DateTime values are embedded via culture-sensitive
    // ToString(); this presumes the parser re-parses the same format — confirm.
    //query = new List<QueryContext> { new QueryContext("created", lowerBound, upperBound) };
    var query = new QueryParser().Parse("created>\\" + lowerBound + "\\+created<\\" + upperBound + "\\");
    using (var readSession = CreateReadSession(dir, version))
    using (var collector = new Collector(readSession))
    {
        var scores = collector.Collect(query).ToList();

        Assert.AreEqual(2, scores.Count);
        Assert.IsTrue(scores.Any(d => d.DocumentId == 1));
        Assert.IsTrue(scores.Any(d => d.DocumentId == 2));
    }
}
// Forwards one accepted document to the main collector, the optional
// drill-down collector, and every per-dimension sideways collector.
private void CollectHit(Collector collector, Collector[] sidewaysCollectors)
{
    //if (DEBUG) {
    //  System.out.println("      hit");
    //}
    collector.Collect(collectDocID);
    if (drillDownCollector != null)
    {
        drillDownCollector.Collect(collectDocID);
    }
    // TODO: we could "fix" faceting of the sideways counts
    // to do this "union" (of the drill down hits) in the
    // end instead:
    // Tally sideways counts:
    for (int dim = 0; dim < sidewaysCollectors.Length; dim++)
    {
        sidewaysCollectors[dim].Collect(collectDocID);
    }
}
// Tears down every engine-managed resource registry at shutdown.
// The whole sequence runs three times — presumably because destroying one
// registry can release references that keep objects in another alive, so
// later passes reclaim what earlier ones could not (TODO confirm).
internal static void Terminate()
{
    for (int loop = 0; loop < 3; loop++)
    {
        SoundSources.DestroyAll();
        Texture2Ds.DestroyAll();
        CubemapTextures.DestroyAll();
        Shader2Ds.DestroyAll();
        Material2Ds.DestroyAll();
        Effects.DestroyAll();
        Meshs.DestroyAll();
        Deformers.DestroyAll();
        Models.DestroyAll();
        KeyframeAnimations.DestroyAll();
        AnimationSources.DestroyAll();
        Scenes.DestroyAll();
        Layer2Ds.DestroyAll();
        Object2Ds.DestroyAll();
        Layer3Ds.DestroyAll();
        Object3Ds.DestroyAll();
        PostEffects.DestroyAll();
        //Profilers.DestroyAll();
        // Flush the engine-side collector, then run two managed GC cycles with a
        // finalizer drain between them so finalizable wrappers get to release
        // their handles, and flush the engine collector again for anything the
        // finalizers just freed.
        Collector.Collect();
        System.GC.Collect();
        System.GC.WaitForPendingFinalizers();
        System.GC.Collect();
        Collector.Collect();
    }
}
public void Can_collect_by_id()
{
    var dir = Path.Combine(CreateDir(), "Can_collect_by_id");
    Directory.CreateDirectory(dir); // no-op when it already exists

    // Primary keys are arbitrary strings; DocumentId is the internal ordinal.
    var documents = new List<dynamic>
    {
        new { _id = "abc0123", title = "rambo first blood" },
        new { _id = "1", title = "rambo 2" },
        new { _id = "2", title = "rocky 2" },
        new { _id = "3", title = "the raiders of the lost ark" },
        new { _id = "four", title = "the rain man" },
        new { _id = "5five", title = "the good, the bad and the ugly" }
    }.ToDocuments();

    var upsert = new DocumentsUpsertOperation(dir, new Analyzer(), compression: Compression.Lz, primaryKey: "_id", documents: documents);
    long indexName = upsert.Commit();

    using (var collector = new Collector(dir, IxInfo.Load(Path.Combine(dir, indexName + ".ix")), new Tfidf()))
    {
        var hits = collector.Collect(new QueryContext("_id", "3")).ToList();

        Assert.AreEqual(1, hits.Count);
        Assert.IsTrue(hits.Any(h => h.DocumentId == 3));
    }

    using (var collector = new Collector(dir, IxInfo.Load(Path.Combine(dir, indexName + ".ix")), new Tfidf()))
    {
        var hits = collector.Collect(new QueryContext("_id", "5five")).ToList();

        Assert.AreEqual(1, hits.Count);
        Assert.IsTrue(hits.Any(h => h.DocumentId == 5));
    }
}
public void Can_collect_by_id()
{
    var dir = CreateDir();

    // Primary keys are arbitrary strings; DocumentId is the internal ordinal.
    var documents = new List<dynamic>
    {
        new { _id = "abc0123", title = "rambo first blood" },
        new { _id = "1", title = "rambo 2" },
        new { _id = "2", title = "rocky 2" },
        new { _id = "3", title = "the raiders of the lost ark" },
        new { _id = "four", title = "the rain man" },
        new { _id = "5five", title = "the good, the bad and the ugly" }
    }.ToDocuments(primaryKeyFieldName: "_id");

    long version;
    using (var transaction = new UpsertTransaction(dir, new Analyzer(), compression: Compression.Lz, documents: documents))
    {
        version = transaction.Write();
    }

    using (var session = CreateReadSession(dir, version))
    using (var collector = new Collector(dir, session))
    {
        var hits = collector.Collect(new QueryContext("_id", "3")).ToList();

        Assert.AreEqual(1, hits.Count);
        Assert.IsTrue(hits.Any(h => h.DocumentId == 3));
    }

    using (var session = CreateReadSession(dir, version))
    using (var collector = new Collector(dir, session))
    {
        var hits = collector.Collect(new QueryContext("_id", "5five")).ToList();

        Assert.AreEqual(1, hits.Count);
        Assert.IsTrue(hits.Any(h => h.DocumentId == 5));
    }
}
// Update is called once per frame.
// Fires a hitscan shot on Fire1 (rate- and ammo-limited), applies impact force,
// collects/respawns hit pickups, and positions the laser line renderer.
void Update()
{
    ammunitionText.text = "Ammunition: " + ammunition.ToString();
    if (Input.GetButtonDown("Fire1") && Time.time > nextFire && ammunition > 0)
    {
        ammunition--;
        nextFire = Time.time + fireTimer;
        StartCoroutine(ShotEffect());
        System.Console.WriteLine("shoot");
        // Ray from the center of the camera's viewport.
        Vector3 rayOrigin = fpsCam.ViewportToWorldPoint(new Vector3(0.5f, 0.5f, 0));
        laserLine.SetPosition(0, gunEnd.position);
        RaycastHit hit;
        if (Physics.Raycast(rayOrigin, fpsCam.transform.forward, out hit, weaponRange))
        {
            laserLine.SetPosition(1, hit.point);
            if (hit.rigidbody != null)
            {
                hit.rigidbody.AddForce(-hit.normal * hitForce);
                RespawnScript rs = hit.transform.gameObject.GetComponent<RespawnScript>();
                if (rs != null)
                {
                    col.Collect(rs.type);
                    this.StartCoroutine(rs.RespawnEffect());
                    rs.gameObject.SetActive(false);
                }
            }
        }
        else
        {
            laserLine.SetPosition(1, rayOrigin + (fpsCam.transform.forward * weaponRange));
        }
        // FIX: removed the trailing laserLine.SetPosition(1, hit.point) that ran
        // after the if/else — on a miss, `hit` is default-initialized and the
        // call overwrote the correct full-range endpoint with Vector3.zero.
    }
}
// Runs one collector pass under a process-wide lock (serializes concurrent
// test runs), sandwiched between BeforeTest()/AfterTest() hooks, and returns
// the captured output with the collector's exit code folded in on failure.
public ProcessOutput ExecuteTest()
{
    lock (_executing)
    {
        // Recycle the log so each run's output is isolated.
        Log.Close();
        Log.Open();
        Log.Info("enter");
        SaveTempOptions();
        // NOTE(review): `config` is never read afterwards — presumably the
        // constructor has side effects (loading/validating TempArgs); confirm
        // before removing.
        ConfigurationOptions config = new ConfigurationOptions(TempArgs, true);
        Collector collector = new Collector();
        Assert.IsNotNull(collector);
        BeforeTest();
        Log.Info(">>>>Starting test<<<<\r\n", ConfigurationOptions);
        // cant call with args
        //
        // populate default collectsfdata.options.json
        int result = collector.Collect();
        //int result = program.Collect(new string[] { });
        Log.Info(">>>>test result<<<<", result);
        ProcessOutput output = AfterTest();
        Assert.IsNotNull(output);
        // A non-zero collector result overrides the process exit code.
        if (result != 0)
        {
            Log.Error($"result {result}");
            output.ExitCode = result;
        }
        WriteConsole($">>>>test result<<<<", output);
        return (output);
    }
}
// Collects matching documents in [firstDocID, max); returns true when more
// matches may remain past max.
public /*protected internal*/ override bool Score(Collector collector, int max, int firstDocID)
{
    doc = firstDocID;
    collector.SetScorer(this);
    for (; doc < max; doc = countingSumScorer.NextDoc())
    {
        collector.Collect(doc);
    }
    return doc != NO_MORE_DOCS;
}
// (Re-)creates every bot module and copies its configuration out of Settings.
// Called at startup and again whenever Collector.requestReload is set.
private static void InitModules()
{
    Collector.Collect();
    Console.WriteLine("Initiating Modules");
    fishing = new AutoFish();
    fishing.Enabled = Settings.AutoFish.Enabled;
    fishing.FishDataLog = Settings.AutoFish.FishDataLog;
    fishing.HighLatencyMode = Settings.AutoFish.HighLatencyMode;
    fishing.PredictMode = Settings.AutoFish.PredictMode;
    fishing.catchGrade = Settings.AutoFish.catchGrade;
    fishing.itemIdFilter_White = Settings.AutoFish.itemIdFilter_White;
    fishing.itemIdFilter_Green = Settings.AutoFish.itemIdFilter_Green;
    fishing.itemIdFilter_Blue = Settings.AutoFish.itemIdFilter_Blue;
    fishing.itemIdFilter_Yellow = Settings.AutoFish.itemIdFilter_Yellow;
    fishing.familyNameWhiteList = Settings.AutoFish.familyNameWhiteList;
    workerRestore = new AutoRestore();
    workerRestore.Enabled = Settings.AutoRestore.Enabled;
    hackUi = new UI();
    hackUi.Enabled = Settings.UIHack.Enabled;
    autoPot = new AutoPotion();
    autoPot.Enabled = Settings.AutoPotion.Enabled;
    autoPot.HPPercent = Settings.AutoPotion.HPPercent;
    autoPot.MPPercent = Settings.AutoPotion.MPPercent;
    autoItemRegister = new AutoItemRegister();
    autoItemRegister.Enabled = Settings.AutoItemRegister.Enabled;
    autoItemRegister.Filters = Settings.AutoItemRegister.Items;
    autoItemBuy = new AutoItemBuy();
    autoItemBuy.Enabled = Settings.AutoItemBuy.Enabled;
    autoItemBuy.Filters = Settings.AutoItemBuy.Items;
    speedHack = new SpeedHack();
    speedHack.GhillieMode = Settings.SpeedHack.GhillieMode;
    // Each actor bundles enabled flag, tweaked values, and the game defaults to restore.
    speedHack.Horse = new SpeedHack.SpeedHackActor(
        Settings.SpeedHack.Horse.Enabled, Settings.SpeedHack.Horse.Accel, Settings.SpeedHack.Horse.Speed,
        Settings.SpeedHack.Horse.Turn, Settings.SpeedHack.Horse.Brake, Settings.SpeedHack.Horse.DefaultAccel,
        Settings.SpeedHack.Horse.DefaultSpeed, Settings.SpeedHack.Horse.DefaultTurn, Settings.SpeedHack.Horse.DefaultBrake);
    speedHack.Ship = new SpeedHack.SpeedHackActor(
        Settings.SpeedHack.Ship.Enabled, Settings.SpeedHack.Ship.Accel, Settings.SpeedHack.Ship.Speed,
        Settings.SpeedHack.Ship.Turn, Settings.SpeedHack.Ship.Brake, Settings.SpeedHack.Ship.DefaultAccel,
        Settings.SpeedHack.Ship.DefaultSpeed, Settings.SpeedHack.Ship.DefaultTurn, Settings.SpeedHack.Ship.DefaultBrake);
    speedHack.Player = new SpeedHack.SpeedHackPlayerActor(
        Settings.SpeedHack.Player.Enabled, Settings.SpeedHack.Player.Movement, Settings.SpeedHack.Player.Attack,
        Settings.SpeedHack.Player.Cast, Settings.SpeedHack.Player.AdvancedMode, Settings.SpeedHack.Player.Factor);
    // NOTE(review): speedHack's whitelist is copied from Settings.AutoFish, not a
    // SpeedHack section — looks like intentional sharing, but confirm it is not
    // a copy-paste mistake.
    speedHack.familyNameWhiteList = Settings.AutoFish.familyNameWhiteList;
    autoProcessing = new AutoProcessing();
    autoProcessing.Enabled = Settings.AutoProcessing.Enabled;
    autoProcessing.Items = Settings.AutoProcessing.Items;
    hackNavigation = new Navigation();
    hackNavigation.SetFlags();
    // Overlay may not exist (created elsewhere); only configure it when present.
    if (Overlay != null)
    {
        Overlay.Enabled = Settings.Overlay.Enabled;
        Overlay.settingsActors = Settings.Overlay.Actors;
        Overlay.GoldenChests = Settings.Overlay.GoldenChests;
    }
}
// Main bot worker thread: one-time initialization followed by an endless
// polling loop that refreshes collected game state and ticks every module.
static void mThread()
{
    // Force '.' as this thread's decimal separator so numeric parsing/formatting
    // of game data is culture-independent.
    System.Globalization.CultureInfo customCulture = (System.Globalization.CultureInfo)System.Threading.Thread.CurrentThread.CurrentCulture.Clone();
    customCulture.NumberFormat.NumberDecimalSeparator = ".";
    System.Threading.Thread.CurrentThread.CurrentCulture = customCulture;
    Collector.Collect();
    ActivityLog();
    LoadStaticSpawnData();
    InitSystemVariables();
    LoadLuaMod();
    InitModules();
    while (true)
    {
        // Refresh game-state snapshot and hotkeys at the top of every tick.
        Collector.Collect();
        GetHotKeys();
        // Hot-reload modules and Lua scripts on request.
        if (Collector.requestReload)
        {
            InitModules();
            LoadLuaMod();
        }
        // Show player coordinates in the console title while idle.
        if (!fishing.isFishing && !_autoItemBuyTrigger)
        {
            Console.Title = String.Format("X:{0} Y:{1} Z:{2}", Collection.Actors.Local.PlayerData.WorldPosition[0], Collection.Actors.Local.PlayerData.WorldPosition[1], Collection.Actors.Local.PlayerData.WorldPosition[2]);
        }
        workerRestore.Run();
        hackUi.Run();
        speedHack.Run();
        autoPot.Run();
        autoItemRegister.Run();
        // Drop the buy trigger once the market window closes.
        if (_autoItemBuyTrigger && !autoItemBuy.ItemMarket.isVisible())
        {
            _autoItemBuyTrigger = false;
        }
        autoItemBuy.Run(_autoItemBuyTrigger);
        autoItemBuy.PostStats();
        autoProcessing.Run();
        fishing.Run(Collector.requestReload);
        fishing.PostStats();
        if (Settings.Overlay.Enabled)
        {
            // Hand the frame to the overlay thread, then busy-wait (1 ms naps)
            // until it clears _draw to signal the frame is done.
            Overlay._draw = true;
            while (Overlay._draw)
            {
                Thread.Sleep(1);
            }
        }
    }
}
/// <summary>
/// Find all elements under this element (including self, and children of children).
/// </summary>
/// <returns>All matching elements in document order.</returns>
public Elements GetAllElements()
{
    return Collector.Collect(new Evaluator.AllElements(), this);
}
/// <summary> Expert: collects matching documents in the range [firstDocID, max).
/// Hook for optimization. <paramref name="firstDocID"/> exists to guarantee that
/// <see cref="DocIdSetIterator.NextDoc()"/> was already called before entry.
/// </summary>
/// <param name="collector">Receives every matching document id.</param>
/// <param name="max">Do not score documents at or past this id.</param>
/// <param name="firstDocID">The current document id (iterator already positioned).</param>
/// <returns>true if more matching documents may remain.</returns>
public /*protected internal*/ virtual bool Score(Collector collector, int max, int firstDocID)
{
    collector.SetScorer(this);
    for (int current = firstDocID; ; current = NextDoc())
    {
        if (current >= max)
        {
            // Exhausted iff the iterator reported NO_MORE_DOCS.
            return current != NO_MORE_DOCS;
        }
        collector.Collect(current);
    }
}
/// <summary>Scores and collects every matching document.</summary>
/// <param name="collector">Receives each matching document id via <c>Collect</c>.</param>
public override void Score(Collector collector)
{
    collector.SetScorer(this);
    // Drain the underlying scorer, handing each hit to the collector.
    for (doc = countingSumScorer.NextDoc(); doc != NO_MORE_DOCS; doc = countingSumScorer.NextDoc())
    {
        collector.Collect(doc);
    }
}