public void Run(IDictionary<string, string> args, ILogger logger)
{
    var dataDirectory = args["dataDirectory"];
    var fileName = args["fileName"];
    var collection = args["collection"];
    var skip = args.ContainsKey("skip") ? int.Parse(args["skip"]) : 0;
    var take = args.ContainsKey("take") ? int.Parse(args["take"]) : int.MaxValue;
    var sampleSize = args.ContainsKey("sampleSize") ? int.Parse(args["sampleSize"]) : 1000;
    var pageSize = args.ContainsKey("pageSize") ? int.Parse(args["pageSize"]) : 100000;
    var collectionId = collection.ToHash();
    var fieldsToStore = new HashSet<string> { "language", "wikibase_item", "title", "text", "url" };
    var fieldsToIndex = new HashSet<string> { "title", "text" };

    if (take == 0)
        take = int.MaxValue;

    var model = new BagOfCharsModel();
    var payload = WikipediaHelper.ReadWP(fileName, skip, take, fieldsToStore, fieldsToIndex);

    using (var sessionFactory = new SessionFactory(dataDirectory, logger))
    {
        var debugger = new IndexDebugger(logger, sampleSize);

        using (var writeSession = new WriteSession(new DocumentWriter(collectionId, sessionFactory)))
        {
            // Process one batch ("page") of documents at a time: each page gets its
            // own index session, and its in-memory index is flushed to disk before
            // the next page begins.
            foreach (var page in payload.Batch(pageSize))
            {
                using (var indexStream = new WritableIndexStream(collectionId, sessionFactory, logger: logger))
                using (var indexSession = new IndexSession<string>(model, model))
                {
                    foreach (var document in page)
                    {
                        writeSession.Put(document);

                        foreach (var field in document.IndexableFields)
                        {
                            indexSession.Put(document.Id, field.KeyId, (string)field.Value);
                        }

                        debugger.Step(indexSession);
                    }

                    indexStream.Write(indexSession.GetInMemoryIndex());
                }
            }
        }
    }
}
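A minimal sketch of how this job might be invoked. The argument keys mirror the ones the method reads above; the paths, the command class name (IndexWikipediaCommand) and the use of NullLogger are placeholders, not part of the original code:

using System.Collections.Generic;
using Microsoft.Extensions.Logging.Abstractions;

// Hypothetical invocation. Only "dataDirectory", "fileName" and "collection"
// are required; the remaining keys fall back to the defaults in Run above.
var args = new Dictionary<string, string>
{
    ["dataDirectory"] = @"c:\data\sir",
    ["fileName"] = @"c:\data\wikipedia.json.bz2",
    ["collection"] = "wikipedia",
    ["take"] = "1000000",
    ["pageSize"] = "100000"
};

// NullLogger keeps the sketch self-contained; any ILogger would do.
new IndexWikipediaCommand().Run(args, NullLogger.Instance);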
public void Can_search_filestreamed_with_multiple_pages()
{
    var model = new BagOfCharsModel();
    const string collection = "Can_search_filestreamed_with_multiple_pages";
    var collectionId = collection.ToHash();
    const string fieldName = "description";

    _sessionFactory.Truncate(collectionId);

    using (var stream = new WritableIndexStream(collectionId, _sessionFactory))
    using (var writeSession = new WriteSession(new DocumentWriter(collectionId, _sessionFactory)))
    {
        var keyId = writeSession.EnsureKeyExists(fieldName);

        for (long i = 0; i < _data.Length; i++)
        {
            var data = _data[i];

            // Each document gets its own index session, so every document
            // produces its own index page on disk.
            using (var indexSession = new IndexSession<string>(model, model))
            {
                var doc = new Document(new Field[] { new Field(fieldName, data, index: true, store: true) });

                writeSession.Put(doc);
                indexSession.Put(doc.Id, keyId, data);
                stream.Write(indexSession.GetInMemoryIndex());
            }
        }
    }

    var queryParser = new QueryParser<string>(_sessionFactory, model);

    using (var searchSession = new SearchSession(_sessionFactory, model, new PostingsReader(_sessionFactory)))
    {
        Assert.DoesNotThrow(() =>
        {
            foreach (var word in _data)
            {
                var query = queryParser.Parse(collection, word, fieldName, fieldName, and: true, or: false);
                var result = searchSession.Search(query, 0, 1);
                var document = result.Documents.FirstOrDefault();

                if (document == null)
                {
                    throw new Exception($"unable to find {word}.");
                }

                if (document.Score < model.IdenticalAngle)
                {
                    throw new Exception($"unable to score {word}.");
                }

                Debug.WriteLine($"{word} matched with {document.Score * 100}% certainty.");
            }
        });
    }
}
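The test relies on a _sessionFactory and a _data corpus that are not shown in the listing. A sketch of what that fixture could look like, assuming NUnit; the directory and the sample phrases are invented for illustration:

using Microsoft.Extensions.Logging.Abstractions;
using NUnit.Framework;

// Hypothetical fixture members for the test class above.
private SessionFactory _sessionFactory;

// Sample corpus; the real test data is not shown in the listing.
private readonly string[] _data = new[]
{
    "apple pie",
    "apple sauce",
    "banana split",
    "cherry cobbler"
};

[OneTimeSetUp]
public void Setup()
{
    _sessionFactory = new SessionFactory(@"c:\data\sir-tests", NullLogger.Instance);
}

[OneTimeTearDown]
public void Teardown()
{
    _sessionFactory.Dispose();
}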
public static void WriteWatSegment(
    string dataDirectory,
    string fileName,
    string collection,
    IModel<string> model,
    ILogger logger,
    string refFileName)
{
    var time = Stopwatch.StartNew();
    var collectionId = collection.ToHash();
    var storeFields = new HashSet<string> { "title", "description", "url", "filename" };
    var indexFields = new HashSet<string> { "title", "description", "url" };

    using (var sessionFactory = new SessionFactory(dataDirectory, logger))
    using (var writeSession = new WriteSession(new DocumentWriter(collectionId, sessionFactory)))
    using (var indexSession = new IndexSession<string>(model, model))
    {
        // Producer/consumer: the main thread enqueues documents while a consumer
        // writes and indexes them. Disposing the queue waits for the backlog to drain.
        using (var queue = new ProducerConsumerQueue<Document>(document =>
        {
            sessionFactory.Write(document, writeSession, indexSession);
        }))
        {
            foreach (var document in ReadWatFile(fileName, refFileName)
                .Select(dic => new Document(
                    dic.Select(kvp => new Field(
                        kvp.Key,
                        kvp.Value,
                        index: indexFields.Contains(kvp.Key),
                        store: storeFields.Contains(kvp.Key))).ToList())))
            {
                queue.Enqueue(document);
            }
        }

        // Flush the fully built in-memory index to disk.
        using (var stream = new WritableIndexStream(collectionId, sessionFactory, logger: logger))
        {
            stream.Write(indexSession.GetInMemoryIndex());
        }
    }

    logger.LogInformation($"indexed {fileName} in {time.Elapsed}");
}
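Flushing the index after the queue's using block only makes sense if disposing the queue blocks until every enqueued document has been consumed. Below is an illustrative sketch of that contract built on BlockingCollection; SimpleWorkQueue is an invented name, and this is an assumption about the pattern, not the library's actual implementation:

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

// Illustrative only: a minimal queue whose Dispose() drains remaining work,
// mirroring the contract the WAT indexer above appears to rely on.
public sealed class SimpleWorkQueue<T> : IDisposable
{
    private readonly BlockingCollection<T> _queue = new BlockingCollection<T>();
    private readonly Task _consumer;

    public SimpleWorkQueue(Action<T> consume)
    {
        _consumer = Task.Run(() =>
        {
            // Blocks until CompleteAdding() is called and the queue is empty.
            foreach (var item in _queue.GetConsumingEnumerable())
                consume(item);
        });
    }

    public void Enqueue(T item) => _queue.Add(item);

    public void Dispose()
    {
        _queue.CompleteAdding(); // signal: no more items are coming
        _consumer.Wait();        // block until the backlog is fully consumed
        _queue.Dispose();
    }
}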
public void Run(IDictionary<string, string> args, ILogger logger)
{
    var time = Stopwatch.StartNew();
    var dataDirectory = args["dataDirectory"];
    var collectionId = args["collection"].ToHash();
    var images = new MnistReader(args["imageFileName"], args["labelFileName"]).Read();
    VectorNode tree;
    var debugger = new IndexDebugger(logger);
    var model = new LinearClassifierImageModel();

    using (var sessionFactory = new SessionFactory(dataDirectory, logger))
    {
        sessionFactory.Truncate(collectionId);

        using (var writeSession = new WriteSession(new DocumentWriter(collectionId, sessionFactory)))
        using (var indexSession = new IndexSession<IImage>(model, model))
        {
            var imageIndexId = writeSession.EnsureKeyExists("image");

            foreach (var image in images)
            {
                // Store both pixels and label, but index only the image field.
                var imageField = new Field("image", image.Pixels, index: true, store: true);
                var labelField = new Field("label", image.Label, index: false, store: true);
                var document = new Document(new Field[] { imageField, labelField });

                writeSession.Put(document);
                indexSession.Put(document.Id, imageField.KeyId, image);
                debugger.Step(indexSession);
            }

            var indices = indexSession.GetInMemoryIndex();

            tree = indices[imageIndexId];

            using (var stream = new WritableIndexStream(collectionId, sessionFactory, logger: logger))
            {
                stream.Write(indices);
            }
        }
    }

    logger.LogInformation($"indexed {debugger.Steps} mnist images in {time.Elapsed}");

    Print(tree);
}
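As with the Wikipedia job, a hypothetical invocation: the file names follow the conventional MNIST distribution naming, and the command class name TrainMnistCommand, the paths and the logger are placeholders:

using System.Collections.Generic;
using Microsoft.Extensions.Logging.Abstractions;

// Hypothetical invocation; all four keys are read by Run above.
var args = new Dictionary<string, string>
{
    ["dataDirectory"] = @"c:\data\sir",
    ["collection"] = "mnist",
    ["imageFileName"] = @"c:\data\mnist\train-images.idx3-ubyte",
    ["labelFileName"] = @"c:\data\mnist\train-labels.idx1-ubyte"
};

new TrainMnistCommand().Run(args, NullLogger.Instance);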