/// <summary>
/// Adds the subversion properties of an item to the Lucene document.
/// Known properties reuse the pre-allocated fields and token streams,
/// svn:mergeinfo is deliberately skipped, and any other property is
/// indexed in an ad-hoc field.
/// </summary>
void IndexProperties(Document doc, IDictionary<string, string> properties)
{
    if (properties == null) return;

    foreach (var prop in properties)
    {
        if (prop.Key == "svn:externals")
        {
            doc.Add(_externalsField);
            _externalsTokenStream.SetText(prop.Value);
        }
        else if (prop.Key == "svn:mime-type")
        {
            doc.Add(_typeField);
            _typeField.SetValue(prop.Value);
        }
        else if (prop.Key == "svn:mergeinfo")
        {
            continue; // don't index
        }
        else
        {
            doc.Add(new Field(prop.Key, new SimpleTokenStream(prop.Value)));
        }
    }
}
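// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original sources: the indexer above
// reuses a small set of pre-allocated Field and TokenStream instances instead
// of allocating new ones per document. A resettable token stream in the spirit
// of SimpleTokenStream could look roughly like this, assuming the Lucene.NET
// 2.x API (Lucene.Net.Analysis.TokenStream/Token); the real SimpleTokenStream
// and PathTokenStream may tokenize differently.
// ---------------------------------------------------------------------------
class ResettableTokenStream : TokenStream
{
    string _text;
    int _pos;

    public ResettableTokenStream(string text) { SetText(text); }

    // Lets the same instance (and the Field wrapping it) be reused for the
    // next document instead of constructing a new stream each time.
    public void SetText(string text)
    {
        _text = text ?? "";
        _pos = 0;
    }

    public bool IsEmpty
    {
        get { return _text.Length == 0; }
    }

    public override Token Next()
    {
        // Emit whitespace-separated, lower-cased tokens, one per call;
        // null signals the end of the stream.
        while (_pos < _text.Length && char.IsWhiteSpace(_text[_pos])) ++_pos;
        if (_pos >= _text.Length) return null;
        int start = _pos;
        while (_pos < _text.Length && !char.IsWhiteSpace(_text[_pos])) ++_pos;
        return new Token(_text.Substring(start, _pos - start).ToLowerInvariant(), start, _pos);
    }
}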
/// <summary>
/// Builds and adds a Lucene document for a single index job. Any existing
/// document with the same id is deleted first so re-indexing does not
/// create duplicates.
/// </summary>
void IndexDocument(IndexJobData data)
{
    if (_args.Verbosity == 0 && data.Path[0] == '$')
    {
        Console.WriteLine("Revision " + data.RevisionFirst);
    }
    else
    {
        Console.WriteLine("Index {0} {1}:{2}", data.Path, data.RevisionFirst, data.RevisionLast);
    }

    // '$'-prefixed revision entries are identified by path alone,
    // ordinary items by path plus their first revision.
    string idText = data.Path[0] == '$' ? data.Path : data.Path + "@" + data.RevisionFirst;
    Term id = _idTerm.CreateTerm(idText);
    _indexWriter.DeleteDocuments(id);

    // In single revision mode only head documents are (re)indexed.
    if (_args.SingleRevision && data.RevisionLast != Revision.Head) return;

    Document doc = MakeDocument();
    _idField.SetValue(idText);
    _pathTokenStream.SetText(data.Path);
    _revFirstField.SetValue(data.RevisionFirst.ToString(RevisionFilter.RevFormat));
    _revLastField.SetValue(data.RevisionLast.ToString(RevisionFilter.RevFormat));
    _authorField.SetValue(data.Info.Author.ToLowerInvariant());
    SetTimestampField(data.Info.Timestamp);
    _messageTokenStream.SetText(_svn.GetLogMessage(data.RevisionFirst));

    if (!data.Info.IsDirectory)
    {
        _sizeField.SetValue(PackedSizeConverter.ToSortableString(data.Info.Size));
        doc.Add(_sizeField);
    }

    _contentTokenStream.SetText(data.Content);
    if (!_contentTokenStream.IsEmpty)
    {
        doc.Add(_contentField);
    }

    IndexProperties(doc, data.Properties);
    _indexWriter.AddDocument(doc);
}
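// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original sources: Lucene 2.x compares
// field values as strings, so a file size has to be encoded so that its
// lexicographic order matches its numeric order before it is usable for
// sorting or range queries. One simple encoding is a fixed-width, zero-padded
// hex string; the real PackedSizeConverter may use a different, more compact
// scheme. The class and method names below are placeholders.
// ---------------------------------------------------------------------------
static class SortableSizeExample
{
    public static string ToSortableString(long size)
    {
        // 16 hex digits cover any non-negative 64 bit value and keep
        // string order identical to numeric order (e.g. "009" < "00a").
        return size.ToString("x16");
    }

    public static long FromSortableString(string s)
    {
        return Convert.ToInt64(s, 16);
    }
}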
static TestIndex()
{
    Directory directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, null, true);
    writer.SetMaxFieldLength(MaxNumberOfTermsPerDocument);

    // Reusable token streams and fields shared by all test documents.
    var pathTokenStream = new PathTokenStream("");
    var contentTokenStream = new SimpleTokenStream("");
    var externalsTokenStream = new PathTokenStream("");
    Field field_id = new Field("id", "", Field.Store.YES, Field.Index.UN_TOKENIZED);
    Field field_rev_first = new Field(FieldName.RevisionFirst, "", Field.Store.NO, Field.Index.UN_TOKENIZED);
    Field field_rev_last = new Field(FieldName.RevisionLast, "", Field.Store.NO, Field.Index.UN_TOKENIZED);

    Document doc = new Document();
    doc.Add(field_id);
    doc.Add(new Field(FieldName.Path, pathTokenStream));
    doc.Add(new Field(FieldName.Content, contentTokenStream));
    doc.Add(new Field(FieldName.Externals, externalsTokenStream));
    doc.Add(field_rev_first);
    doc.Add(field_rev_last);

    for (int i = 0; i < Data.GetLength(0); ++i)
    {
        string id = Data[i, 1];
        field_id.SetValue(id);
        pathTokenStream.SetText(id);
        int rev_first = Revision.Head;
        if (id.StartsWith("/revisions"))
        {
            contentTokenStream.SetText("");
            externalsTokenStream.SetText("");
            rev_first = int.Parse(Data[i, 2]);
        }
        else
        {
            contentTokenStream.SetText(Data[i, 2]);
            externalsTokenStream.SetText(Data[i, 3]);
        }
        field_rev_first.SetValue(RevisionFieldValue(rev_first));
        field_rev_last.SetValue(HeadRevisionFieldValue());
        writer.AddDocument(doc);

        if (id.StartsWith("/revisions") && Data[i, 3] != null) // update last revision
        {
            // Change the last revision.
            // Warning: it is not possible to load a document back from the
            // index, so it has to be rebuilt from scratch before re-adding.
            writer.DeleteDocuments(new Term("id", id));
            pathTokenStream.SetText(id);
            contentTokenStream.SetText("");
            externalsTokenStream.SetText("");
            int rev_last = int.Parse(Data[i, 3]);
            field_rev_last.SetValue(RevisionFieldValue(rev_last));
            id += "@" + rev_first;
            Data[i, 1] = id;
            field_id.SetValue(id);
            writer.AddDocument(doc);
        }
    }

    // Deleting a non-existent document must not throw.
    writer.DeleteDocuments(new Term("id", "bliflaiwj123dj33"));

    writer.Optimize();
    writer.Close();

    Searcher = new IndexSearcher(directory);
    Assert.AreEqual(Data.GetLength(0), Searcher.MaxDoc()); // smoke test for index creation
}
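// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original test fixture: once the static
// constructor has run, individual tests can query the shared Searcher, for
// example with a TermQuery against the id field. The helper name is a
// placeholder and the id argument is whatever entry of the Data table a test
// wants to check; the Hits-based Search API is the Lucene.NET 2.x one used by
// the rest of this code.
// ---------------------------------------------------------------------------
static void AssertIdIsIndexed(string id)
{
    Hits hits = Searcher.Search(new TermQuery(new Term("id", id)));
    Assert.AreEqual(1, hits.Length());
}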