/// <summary>
/// Returns the highest indexed revision for <paramref name="path"/>, or 0 when
/// the path is unknown, the reader is unavailable, or the newest revision is
/// not the head revision.
/// </summary>
/// <param name="path">Repository path whose latest revision is requested.</param>
/// <returns>The highest revision number found, or 0.</returns>
public int Get(string path)
{
    int revision;

    // Fast path: serve from the cache of known-highest revisions.
    lock (_highest)
    {
        if (_highest.TryGetValue(path, out revision))
        {
            return revision;
        }
    }

    if (Reader == null)
    {
        return 0;
    }

    // Id terms are stored as "<path>@<revision>"; walk all terms with that prefix
    // and keep the largest revision (plus the doc that carries it).
    path += "@";
    int doc = -1;
    TermEnum t = Reader.Terms(new Term(FieldName.Id, path));
    try // FIX: enumerator was previously leaked if anything below threw
    {
        while (t.Term() != null && t.Term().Text().StartsWith(path))
        {
            int r = int.Parse(t.Term().Text().Substring(path.Length));
            if (r > revision)
            {
                revision = r;
                TermDocs d = Reader.TermDocs(t.Term());
                try // FIX: TermDocs was never closed at all
                {
                    d.Next();
                    doc = d.Doc();
                }
                finally
                {
                    d.Close();
                }
            }
            t.Next();
        }
    }
    finally
    {
        t.Close();
    }

    // Only accept the result when the stored document is still the head revision;
    // otherwise report "unknown" so the caller falls back.
    if (revision != 0 && Reader.Document(doc).Get(FieldName.RevisionLast) != Revision.HeadString)
    {
        return 0;
    }
    return revision;
}
/// <summary>
/// Finishes an update pass: detects files that vanished from disk, then applies
/// deletions, additions, and modifications to the Lucene index, reporting
/// progress through <c>_job_status</c>. No-op unless started and asked to close.
/// </summary>
/// <param name="shouldClose">When false, leaves the reader/searcher open and does nothing.</param>
public void End(bool shouldClose)
{
    if (!_is_started) { return; }
    if (!shouldClose) { return; }

    // Build the list of indexed paths that no longer exist on disk.
    if (!_job_status.Cancelled)
    {
        TermEnum term_enum = _index_reader.Terms();
        Term path_term = new Term("path");
        int nb_terms = 0;
        while (term_enum.SkipTo(path_term)) // skip to new term equal or *ABOVE* "path:" !!!
        {
            Term term = term_enum.Term();
            if (term.Field() != path_term.Field()) { break; }
            if (!File.Exists(term.Text())) { _del_file_list.Add(term.Text()); }
            if (_job_status.Cancelled) { break; }
            nb_terms++;
        }
        term_enum.Close();
        Logger.Log.Info("update: deletion: {0} analyzed terms, found {1} vanished files.", nb_terms, _del_file_list.Count);
    }
    _index_searcher.Close();
    _index_reader.Close();

    // --- Remove vanished files from the index.
    if ((_del_file_list.Count > 0) && (!_job_status.Cancelled))
    {
        Stopwatch watch = Stopwatch.StartNew();
        int num_file = 0;
        int nb_files = _del_file_list.Count;
        IndexWriter writer = null;
        // FIX: writer is now guarded by try/catch/finally like the add/update
        // sections below, so it is closed even when deletion throws.
        try
        {
            writer = new IndexWriter(_index_path, _default_analyzer, false);
            foreach (string path in _del_file_list)
            {
                // FIX: num_file was incremented a second time inside this branch,
                // corrupting both the progress percentage and the shown counter.
                if (((num_file++) % 101) == 1)
                {
                    ReportProgress("upd: removing (from index) file {0}/{1} - {2}", num_file, nb_files, watch);
                }
                if (_job_status.Cancelled) { break; }
                writer.DeleteDocuments(new Term("path", path));
            }
            writer.Commit();
        }
        catch (System.Exception ex)
        {
            Log.Error(ex);
        }
        finally
        {
            if (writer != null) { writer.Close(); writer = null; }
        }
        watch.Stop();
    }

    // --- Add newly discovered files.
    if ((_add_file_list.Count > 0) && (!_job_status.Cancelled))
    {
        Stopwatch watch = Stopwatch.StartNew();
        IndexWriter writer = null;
        try
        {
            writer = new IndexWriter(_index_path, _default_analyzer, false, new IndexWriter.MaxFieldLength(IndexWriter.DEFAULT_MAX_FIELD_LENGTH));
            int num_file = 0;
            int nb_files = _add_file_list.Count;
            foreach (BasicFileInfo fi in _add_file_list)
            {
                // FIX: removed the duplicated num_file++ inside the branch.
                if (((num_file++) % 101) == 1)
                {
                    ReportProgress("upd: indexing new file {0}/{1} - {2}", num_file, nb_files, watch);
                }
                if (_job_status.Cancelled) { break; }
                writer.AddDocument(_doc_factory.CreateFromPath(fi.FilePath, fi.LastModification));
                // Flush periodically so a crash loses at most ~20 documents.
                if (num_file % 20 == 0) { writer.Commit(); }
            }
            writer.Commit();
        }
        catch (System.Exception ex)
        {
            Log.Error(ex);
        }
        finally
        {
            if (writer != null) { writer.Close(); writer = null; }
        }
        watch.Stop();
    }

    // --- Re-index files whose content changed.
    if ((_upd_file_list.Count > 0) && (!_job_status.Cancelled))
    {
        Stopwatch watch = Stopwatch.StartNew();
        int num_file = 0;
        int nb_files = _upd_file_list.Count;
        IndexWriter writer = null;
        try
        {
            writer = new IndexWriter(_index_path, _default_analyzer, false, new IndexWriter.MaxFieldLength(IndexWriter.DEFAULT_MAX_FIELD_LENGTH));
            foreach (BasicFileInfo fi in _upd_file_list)
            {
                // FIX: removed the duplicated num_file++ inside the branch.
                if (((num_file++) % 101) == 1)
                {
                    ReportProgress("upd: modified file {0}/{1} - {2}", num_file, nb_files, watch);
                }
                if (_job_status.Cancelled) { break; }
                writer.UpdateDocument(new Term("path", fi.FilePath), _doc_factory.CreateFromPath(fi.FilePath, fi.LastModification));
            }
            writer.Commit();
            //LittleBeagle.Properties.Settings.Default.NbIndexedFiles = num_file;
        }
        catch (System.Exception ex)
        {
            Log.Error(ex);
        }
        finally
        {
            if (writer != null) { writer.Close(); writer = null; }
        }
        watch.Stop();
    }
}

/// <summary>
/// Publishes progress to the job status: percentage plus a formatted description
/// containing the current file number, the total, and the elapsed time.
/// Extracted from three duplicated inline snippets in <see cref="End"/>.
/// </summary>
/// <param name="format">Description format with {0}=file number, {1}=total, {2}=elapsed.</param>
/// <param name="numFile">1-based number of the file being processed.</param>
/// <param name="nbFiles">Total number of files in this phase.</param>
/// <param name="watch">Stopwatch measuring the phase's elapsed time.</param>
private void ReportProgress(string format, int numFile, int nbFiles, Stopwatch watch)
{
    _job_status.Progress = (numFile * 100) / nbFiles;
    _job_status.Description = String.Format(format, numFile, nbFiles,
        StringFu.TimeSpanToString(new TimeSpan(watch.ElapsedMilliseconds * 10000L)));
}
/// <summary>Closes the wrapped term enumerator, releasing its resources.</summary>
public void Close()
{
    termEnum.Close();
}
/// <summary>
/// Builds a <c>StringIndex</c> for <paramref name="entryKey"/>'s field: an
/// ordinal per document (index into the term-text array) plus the sorted term
/// texts themselves, by walking every term of the field and tagging its docs.
/// </summary>
/// <param name="reader">Index reader supplying terms and postings.</param>
/// <param name="entryKey">Cache entry identifying the field to index.</param>
/// <returns>A <c>StringIndex</c> mapping doc id to term ordinal.</returns>
protected internal override System.Object CreateValue(IndexReader reader, Entry entryKey)
{
    System.String field = StringHelper.Intern(entryKey.field);
    int[] docToTermOrd = new int[reader.MaxDoc];
    System.String[] termTexts = new System.String[reader.MaxDoc + 1];
    TermDocs termDocs = reader.TermDocs();
    TermEnum termEnum = reader.Terms(new Term(field));
    int ord = 0; // ordinal of the current term

    // Ordinal 0 is reserved for documents with no term in this field; they
    // sort to the top — FieldDocSortedHitQueue relies on this choice.
    termTexts[ord++] = null;
    try
    {
        for (;;)
        {
            Term term = termEnum.Term;
            if (term == null || term.Field != field || ord >= termTexts.Length)
            {
                break;
            }

            termTexts[ord] = term.Text;

            // Tag every document containing this term with the term's ordinal.
            termDocs.Seek(termEnum);
            while (termDocs.Next())
            {
                docToTermOrd[termDocs.Doc] = ord;
            }

            ord++;
            if (!termEnum.Next())
            {
                break;
            }
        }
    }
    finally
    {
        termDocs.Close();
        termEnum.Close();
    }

    if (ord == 0)
    {
        // No terms at all: keep a single null entry.
        termTexts = new System.String[1];
    }
    else if (ord < termTexts.Length)
    {
        // Fewer distinct terms than documents: trim the unused tail.
        System.String[] trimmed = new System.String[ord];
        Array.Copy(termTexts, 0, trimmed, 0, ord);
        termTexts = trimmed;
    }

    return new StringIndex(docToTermOrd, termTexts);
}