// Returns key/value pairs given a string of the following format:
//   key1=value1, key2=value2,key3="values 3, 4, and 5" etc.
// Returns null upon error.
// Supports both single- and double-quoted strings.
public static Dictionary<string, string> Parse(string raw)
{
    SimpleTokenStream stream = new SimpleTokenStream(raw);
    Dictionary<string, string> output = new Dictionary<string, string>();
    try
    {
        while (stream.HasMore)
        {
            if (output.Count > 0)
            {
                stream.SkipWhitespace();
                stream.PopExpected(',');
            }
            stream.SkipWhitespace();
            string key = stream.PopPossiblyQuotedToken('=');
            stream.SkipWhitespace();
            stream.PopExpected('=');
            stream.SkipWhitespace();
            string value = stream.PopPossiblyQuotedToken(',');
            output[key] = value;

            // Skip whitespace before the .HasMore check.
            stream.SkipWhitespace();
        }
    }
    catch (InvalidOperationException)
    {
        return null;
    }
    return output;
}
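A minimal usage sketch of Parse above; the wrapper method name, the input string, and the keys are illustrative assumptions, not part of the original source:

// Hypothetical caller of Parse; assumes Parse and SimpleTokenStream are in scope.
public static void LoadSettings()
{
    Dictionary<string, string> settings =
        Parse("host=localhost, port=8080, motd=\"values 3, 4, and 5\"");
    if (settings == null)
    {
        // Parse returns null on malformed input (see comment above).
        return;
    }
    string host = settings["host"]; // "localhost"
    string motd = settings["motd"]; // quoted value; quoting allows an embedded comma
}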
public void SimpleTokenStream()
{
    var ts = new SimpleTokenStream(@"Hullebulle * trallala #include <bli\bla\blub>");
    var t = new Token();
    Assert.That(NextToken(ts, t), Is.EqualTo("HULLEBULLE"));
    Assert.That(NextToken(ts, t), Is.EqualTo("TRALLALA"));
    Assert.That(NextToken(ts, t), Is.EqualTo("INCLUDE"));
    Assert.That(NextToken(ts, t), Is.EqualTo("BLI"));
    Assert.That(NextToken(ts, t), Is.EqualTo("BLA"));
    Assert.That(NextToken(ts, t), Is.EqualTo("BLUB"));
    Assert.That(NextToken(ts, t), Is.Null);
}
public string[] GetFragments(Query query, Hits hits)
{
    var highlighter = CreateHighlighter(query);
    var result = new string[hits.Length()];
    for (var i = 0; i < result.Length; i++)
    {
        var size = PackedSizeConverter.FromSortableString(hits.Doc(i).Get(FieldName.Size));
        var loc = hits.Doc(i).Get(FieldName.Id).Split('@');
        var info = Svn.GetPathInfo(loc[0], Convert.ToInt32(loc[1]));
        if (info.IsDirectory)
        {
            continue;
        }
        var text = Svn.GetPathContent(loc[0], Convert.ToInt32(loc[1]), size);
        var tokens = new SimpleTokenStream(text);
        result[i] = GetFragments(highlighter, tokens, text);
    }
    return result;
}
public void Null()
{
    SimpleTokenStream ts = new SimpleTokenStream(null);
    Assert.IsNull(ts.Next());
}
public void Empty()
{
    SimpleTokenStream ts = new SimpleTokenStream("");
    Assert.IsNull(ts.Next());
}
static TestIndex()
{
    Directory directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, null, true);
    writer.SetMaxFieldLength(MaxNumberOfTermsPerDocument);

    var pathTokenStream = new PathTokenStream("");
    var contentTokenStream = new SimpleTokenStream("");
    var externalsTokenStream = new PathTokenStream("");

    Field field_id = new Field("id", "", Field.Store.YES, Field.Index.UN_TOKENIZED);
    Field field_rev_first = new Field(FieldName.RevisionFirst, "", Field.Store.NO, Field.Index.UN_TOKENIZED);
    Field field_rev_last = new Field(FieldName.RevisionLast, "", Field.Store.NO, Field.Index.UN_TOKENIZED);

    Document doc = new Document();
    doc.Add(field_id);
    doc.Add(new Field(FieldName.Path, pathTokenStream));
    doc.Add(new Field(FieldName.Content, contentTokenStream));
    doc.Add(new Field(FieldName.Externals, externalsTokenStream));
    doc.Add(field_rev_first);
    doc.Add(field_rev_last);

    for (int i = 0; i < Data.GetLength(0); ++i)
    {
        string id = Data[i, 1];
        field_id.SetValue(id);
        pathTokenStream.SetText(id);
        int rev_first = Revision.Head;
        if (id.StartsWith("/revisions"))
        {
            contentTokenStream.SetText("");
            externalsTokenStream.SetText("");
            rev_first = int.Parse(Data[i, 2]);
        }
        else
        {
            contentTokenStream.SetText(Data[i, 2]);
            externalsTokenStream.SetText(Data[i, 3]);
        }
        field_rev_first.SetValue(RevisionFieldValue(rev_first));
        field_rev_last.SetValue(HeadRevisionFieldValue());
        writer.AddDocument(doc);

        if (id.StartsWith("/revisions") && Data[i, 3] != null) // update the last revision
        {
            // Change the last revision.
            // Warning: it is not possible to load a document from the index;
            // we have to rebuild/reparse it from scratch.
            writer.DeleteDocuments(new Term("id", id));
            pathTokenStream.SetText(id);
            contentTokenStream.SetText("");
            externalsTokenStream.SetText("");
            int rev_last = int.Parse(Data[i, 3]);
            field_rev_last.SetValue(RevisionFieldValue(rev_last));
            id += "@" + rev_first;
            Data[i, 1] = id;
            field_id.SetValue(id);
            writer.AddDocument(doc);
        }
    }

    // Delete a nonexistent document (test).
    writer.DeleteDocuments(new Term("id", "bliflaiwj123dj33"));

    writer.Optimize();
    writer.Close();

    Searcher = new IndexSearcher(directory);
    Assert.AreEqual(Data.GetLength(0), Searcher.MaxDoc()); // smoke test for index creation
}