// Inotify callback for the Konversation log directory: schedules (re)indexing
// of the created/modified log file, resuming from any previously recorded
// session offset so already-indexed content is not re-read.
private void OnInotify(Inotify.Watch watch, string path, string subitem, string srcpath, Inotify.EventType type)
{
    path = Path.Combine(path, subitem);

    // A task for this file is already queued or running; let it finish.
    if (ThisScheduler.ContainsByTag(path)) {
        Log.Debug("Not adding task for already running task: {0}", path);
        return;
    }

    // The file will be picked up by the initial crawl; don't double-schedule.
    lock (initial_log_files) {
        if (initial_log_files.Contains(path)) {
            Log.Debug("{0} is already scheduled for initial indexing", path);
            return;
        }
    }

    // Single dictionary lookup instead of ContainsKey + indexer.
    // TryGetValue leaves offset at 0 when the key is absent.
    long offset;
    session_offset_table.TryGetValue(path, out offset);

    SessionIndexableGenerator generator = new SessionIndexableGenerator(this, path, offset);
    Scheduler.Task task = NewAddTask(generator);
    task.Tag = path;
    task.Source = this;
    ThisScheduler.Add(task);
}
// Kick off the Konqueror webcache backend: use inotify to watch for new
// cache directories when available, otherwise fall back to a scheduled
// crawl task, then perform an initial crawl either way.
private void StartWorker()
{
    // If the cache directory is absent, the user is not running KDE;
    // no need to periodically check.
    if (!Directory.Exists(konq_cache_dir)) {
        //GLib.Timeout.Add (60000, new GLib.TimeoutHandler (CheckForExistence));
        return;
    }

    if (Inotify.Enabled) {
        // Watch konq_cache_dir for new directory creations.
        Inotify.Subscribe(konq_cache_dir, OnInotifyEvent, Inotify.EventType.Create);
    } else {
        Scheduler.Task crawler = Scheduler.TaskFromHook(new Scheduler.TaskHook(CrawlHook));
        crawler.Tag = "Crawling konqueror webcache";
        crawler.Source = this;
        ThisScheduler.Add(crawler);
    }

    Log.Info("Starting Konq history backend ...");
    Crawl();
}
// Start the Konversation backend: snapshot the existing log files, hook up
// inotify for new/changed logs, and schedule the initial indexing pass.
private void StartWorker()
{
    if (!Directory.Exists(log_dir)) {
        // Log directory is missing; re-check for it every five minutes.
        GLib.Timeout.Add(300000, new GLib.TimeoutHandler(CheckForExistence));
        return;
    }

    Log.Info("Starting konversation backend; using log files from {0}", log_dir);

    session_offset_table = new Dictionary <string, long> ();

    if (Inotify.Enabled)
        Inotify.Subscribe(log_dir, OnInotify, Inotify.EventType.Create | Inotify.EventType.Modify);

    initial_log_files = new ArrayList(Directory.GetFiles(log_dir));
    Log.Debug("Konversation backend: found {0} log files", initial_log_files.Count);

    IsIndexing = true;

    LogIndexableGenerator log_generator = new LogIndexableGenerator(this, log_dir);
    Scheduler.Task index_task = NewAddTask(log_generator);
    index_task.Tag = log_dir;
    index_task.Source = this;
    ThisScheduler.Add(index_task);
}
/////////////////////////////////////////////////

// Start the Labyrinth notes backend: subscribe to note writes via inotify
// (when available) and schedule the crawl of existing notes.
private void StartWorker()
{
    if (!Directory.Exists(lab_dir)) {
        // Directory not there yet; poll for its appearance once a minute.
        GLib.Timeout.Add(60000, new GLib.TimeoutHandler(CheckForExistence));
        return;
    }

    Log.Info("Starting Labyrinth backend");

    Stopwatch timer = new Stopwatch();
    timer.Start();

    if (Inotify.Enabled)
        Inotify.Subscribe(lab_dir, OnInotifyNewNote, Inotify.EventType.CloseWrite | Inotify.EventType.Modify);

    Scheduler.Task crawl_task = NewAddTask(this);
    crawl_task.Tag = "Crawling Labyrinth Notes";
    crawl_task.Source = this;
    ThisScheduler.Add(crawl_task);

    timer.Stop();
    Log.Info("labyrinth backend worker thread done in {0}", timer);
}
// Handler for the optimize-all event: queues an optimizer task at a
// boosted priority so it runs ahead of routine maintenance work.
private void OnOptimizeAllEvent()
{
    // construct an optimizer task, but boost the priority
    Scheduler.Task optimize = NewOptimizeTask();
    optimize.Priority = Scheduler.Priority.Delayed;
    ThisScheduler.Add(optimize);
}
// Begin a crawl of the Konqueror cache: reset the directory enumerator and
// schedule this backend (acting as its own indexable generator) for indexing.
private void Crawl()
{
    directory_enumerator = DirectoryWalker.GetDirectoryInfos(konq_cache_dir).GetEnumerator();

    Scheduler.Task task = NewAddTask(this);
    task.Tag = crawler_tag;
    ThisScheduler.Add(task);
}
// Schedule an index-optimize pass, rate-limited to roughly once per day.
// If a day has passed since the last optimize, run soon (10 minutes);
// otherwise delay until a full day has elapsed since the last one.
private void ScheduleOptimize()
{
    double optimize_delay;

    // Really we only want to optimize at most once a day, even if we have
    // indexed a ton of data
    TimeSpan span = DateTime.Now - last_optimize_time;

    if (span.TotalDays > 1.0) {
        optimize_delay = 10.0;                  // minutes
    } else {
        // Time remaining until a full day has passed since last optimize.
        optimize_delay = (new TimeSpan(TimeSpan.TicksPerDay) - span).TotalMinutes;
    }

    if (our_optimize_task == null)
        our_optimize_task = NewOptimizeTask();

    // Testing hook: force a near-immediate optimize.
    if (OptimizeRightAway || Environment.GetEnvironmentVariable("BEAGREP_UNDER_BLUDGEON") != null)
        optimize_delay = 1 / 120.0;             // half a second

    // Changing the trigger time of an already-scheduled process
    // does what you would expect.
    our_optimize_task.TriggerTime = DateTime.Now.AddMinutes(optimize_delay);

    // Adding the same task more than once is a harmless no-op.
    ThisScheduler.Add(our_optimize_task);
}
// Run one processing pass over every registered scheduler.
// Iterates a snapshot (ToArray) so Process() may safely alter the list.
public static void Poll()
{
    Scheduler[] snapshot = Schedulers.List.ToArray();
    foreach (Scheduler scheduler in snapshot)
        scheduler.Process();
}
/////////////////////////////////////////////////

// Start the Empathy log backend: crawl the existing logs, and either watch
// the log directory via inotify or schedule a periodic re-crawl task.
private void StartWorker()
{
    if (!Directory.Exists(log_dir)) {
        // Log directory absent; poll for it once a minute.
        GLib.Timeout.Add(60000, new GLib.TimeoutHandler(CheckForExistence));
        return;
    }

    Logger.Log.Info("Starting Empathy log backend");

    Stopwatch timer = new Stopwatch();
    timer.Start();

    if (Inotify.Enabled)
        Watch(log_dir);

    crawler = new EmpathyCrawler(log_dir);
    Crawl();

    if (!Inotify.Enabled) {
        // No inotify: fall back to polling for new logfiles.
        Scheduler.Task poll_task = Scheduler.TaskFromHook(new Scheduler.TaskHook(CrawlHook));
        poll_task.Tag = "Crawling ~/.gnome2/Empathy to find new logfiles";
        poll_task.Source = this;
        ThisScheduler.Add(poll_task);
    }

    timer.Stop();
    Logger.Log.Info("Empathy log backend worker thread done in {0}", timer);
}
// Queue immediate removal of a Tomboy note from the index,
// identified by the note URI derived from its file name.
private void RemoveNote(string file)
{
    Uri note_uri = Note.BuildNoteUri(file, "tomboy");

    Scheduler.Task remove_task = NewRemoveTask(note_uri);
    remove_task.Priority = Scheduler.Priority.Immediate;
    remove_task.SubPriority = 0;
    ThisScheduler.Add(remove_task);
}
/////////////////////////////////////////////

// Index the API documentation contained in a single Monodoc zip archive.
// Each extension-less entry is parsed as a /Type XML document; one add-task
// is queued per type plus one per member, all tied to a marking task group
// keyed on the archive's path and timestamp.
// Returns the number of types indexed, or -1 if the file is already up to date.
int IndexArchive(FileInfo file, Scheduler.Priority priority)
{
    if (this.FileAttributesStore.IsUpToDate(file.FullName))
        return(-1);

    log.Debug("Scanning Monodoc source file " + file);

    Scheduler.TaskGroup group = NewMarkingTaskGroup(file.FullName, file.LastWriteTime);

    int countTypes = 0;
    ZipFile archive = new ZipFile(file.ToString());

    try {
        foreach (ZipEntry entry in archive) {
            // Entries containing a dot are not type documents; skip them.
            if (entry.Name.IndexOf(".") != -1)
                continue;

            XmlDocument document = new XmlDocument();
            document.Load(archive.GetInputStream(entry));

            XmlNode type = document.SelectSingleNode("/Type");
            if (type == null)
                continue;

            Indexable typeIndexable = TypeNodeToIndexable(type, file);

            Scheduler.Task typeTask = NewAddTask(typeIndexable);
            typeTask.Priority = priority;
            typeTask.SubPriority = 0;
            typeTask.AddTaskGroup(group);
            ThisScheduler.Add(typeTask);

            foreach (XmlNode member in type.SelectNodes("Members/Member")) {
                Indexable memberIndexable = MemberNodeToIndexable(member, file, type.Attributes["FullName"].Value);

                Scheduler.Task memberTask = NewAddTask(memberIndexable);
                memberTask.Priority = priority;
                memberTask.SubPriority = 0;
                memberTask.AddTaskGroup(group);
                ThisScheduler.Add(memberTask);
            }

            countTypes++;
        }
    } finally {
        // Fix: the archive was never closed, leaking the underlying
        // file handle for every archive scanned.
        archive.Close();
    }

    return(countTypes);
}
// Periodic scheduler hook: trigger a new crawl unless one is already
// running, then reschedule itself for the next polling interval.
private void CrawlHook(Scheduler.Task task)
{
    bool crawl_in_progress = ThisScheduler.ContainsByTag(crawler_tag);
    if (!crawl_in_progress)
        Crawl();

    task.Reschedule = true;
    task.TriggerTime = DateTime.Now.AddSeconds(polling_interval_in_seconds);
}
// Queue immediate removal of a bookmark from the index.
// The uid is already a full URI string.
private void RemoveBookmark(string uid)
{
    Uri uri = new Uri(uid);
    // Fixed copy-paste error: the message previously said "contact".
    Log.Debug("Removing bookmark {0}", uri);

    Scheduler.Task task = NewRemoveTask(uri);
    task.Priority = Scheduler.Priority.Immediate;
    task.SubPriority = 0;
    ThisScheduler.Add(task);
}
// Queue immediate removal of an Akregator feed file from the index.
private void RemoveFeedFile(string file)
{
    Log.Debug("Removing Akregator feedfile:" + file);

    Uri file_uri = UriFu.PathToFileUri(file);
    Scheduler.Task remove_task = NewRemoveTask(file_uri);
    remove_task.Priority = Scheduler.Priority.Immediate;
    remove_task.SubPriority = 0;
    ThisScheduler.Add(remove_task);
}
// Queue immediate removal of a KAddressBook contact from the index,
// addressed via the kabc:/// URI scheme.
private void RemoveContact(string uid)
{
    Uri contact_uri = new Uri(String.Format("kabc:///{0}", uid));
    Logger.Log.Debug("Removing contact {0}", contact_uri);

    Scheduler.Task remove_task = NewRemoveTask(contact_uri);
    remove_task.Priority = Scheduler.Priority.Immediate;
    remove_task.SubPriority = 0;
    ThisScheduler.Add(remove_task);
}
// Queue immediate removal of a KOrganizer entry from the index,
// addressed via the korganizer:/// URI scheme.
private void RemoveEntry(string uid)
{
    Uri entry_uri = new Uri(String.Format("korganizer:///{0}", uid));
    Logger.Log.Debug("Removing entry {0}", entry_uri);

    Scheduler.Task remove_task = NewRemoveTask(entry_uri);
    remove_task.Priority = Scheduler.Priority.Immediate;
    remove_task.SubPriority = 0;
    ThisScheduler.Add(remove_task);
}
// Convert a data file to an indexable and queue it for immediate indexing.
// Silently returns if the file disappeared before it could be converted.
private void IndexFile(FileInfo data_file)
{
    Indexable indexable = FileToIndexable(data_file);

    if (indexable == null) // The file disappeared
        return;

    Scheduler.Task add_task = NewAddTask(indexable);
    add_task.Priority = Scheduler.Priority.Immediate;
    ThisScheduler.Add(add_task);
}
// Queue the final-flush maintenance task for this index, creating it
// lazily on first use. Re-adding an existing task is a harmless no-op.
private void ScheduleFinalFlush()
{
    if (our_final_flush_task == null) {
        FinalFlushTask flush = new FinalFlushTask(this);
        flush.Tag = "Final Flush for " + IndexName;
        flush.Priority = Scheduler.Priority.Maintenance;
        flush.SubPriority = 100; // do this first when starting maintenance
        flush.Source = this;
        our_final_flush_task = flush;
    }

    ThisScheduler.Add(our_final_flush_task);
}
// Handle an IndexingServiceRequest: resolve the target backend (defaulting
// to this one), then queue a single generator task covering all requested
// additions and removals. Returns an ErrorResponse if the named backend is
// missing or not Lucene-backed; otherwise an EmptyResponse immediately
// (indexing happens asynchronously).
private ResponseMessage HandleMessage(RequestMessage msg)
{
    IndexingServiceRequest isr = (IndexingServiceRequest)msg;
    LuceneQueryable backend = this;

    // An explicit Source routes the request to another backend.
    if (isr.Source != null) {
        Queryable target = QueryDriver.GetQueryable(isr.Source);

        if (target == null) {
            string err = String.Format("Unable to find backend matching '{0}'", isr.Source);
            Log.Error(err);
            return(new ErrorResponse(err));
        }

        if (!(target.IQueryable is LuceneQueryable)) {
            string err = String.Format("Backend '{0}' is not an indexed backend", isr.Source);
            Log.Error(err);
            return(new ErrorResponse(err));
        }

        backend = (LuceneQueryable)target.IQueryable;
        Log.Debug("Found backend for IndexingServiceRequest: {0}", backend.IndexName);
    }

    // FIXME: There should be a way for the request to control the
    // scheduler priority of the task.

    if (isr.ToAdd.Count > 0 || isr.ToRemove.Count > 0) {
        Log.Debug("IndexingService: Adding {0} indexables, removing {1} indexables.", isr.ToAdd.Count, isr.ToRemove.Count);

        IndexableGenerator ind_gen;
        ind_gen = new IndexableGenerator(isr.ToAdd, isr.ToRemove, this);
        // The task is created via the resolved backend but added to
        // ThisScheduler (this backend's scheduler).
        // NOTE(review): looks like this assumes both backends share the
        // same scheduler — confirm, otherwise the task may run on the
        // wrong one when a Source is specified.
        Scheduler.Task task = backend.NewAddTask(ind_gen);
        task.Priority = Scheduler.Priority.Immediate;
        ThisScheduler.Add(task);
    }

    // FIXME: There should be an asynchronous response (fired by a Scheduler.Hook)
    // that fires when all of the items have been added to the index.

    // No response
    return(new EmptyResponse());
}
/////////////////////////////////////////////////

// Queue indexing of the Blam feed items, unless a "Blam" task is
// already queued or running.
private void Index()
{
    if (ThisScheduler.ContainsByTag("Blam")) {
        Logger.Log.Debug("Not adding task for already running Blam task");
        return;
    }

    ItemIndexableGenerator item_generator = new ItemIndexableGenerator(this, blam_dir, blam_file.FullName);
    Scheduler.Task index_task = NewAddTask(item_generator);
    index_task.Tag = "Blam";
    ThisScheduler.Add(index_task);
}
/////////////////////////////////////////////////

// Queue indexing of one feed file, tagged by its filename so duplicate
// tasks for the same feed are suppressed.
private void IndexSingleFeed(string filename)
{
    if (ThisScheduler.ContainsByTag(filename)) {
        Log.Debug("Not adding task for already running task: {0}", filename);
        return;
    }

    FeedIndexableGenerator feed_generator = new FeedIndexableGenerator(this, filename);
    Scheduler.Task feed_task = NewAddTask(feed_generator);
    feed_task.Tag = filename;
    feed_task.Source = this;
    ThisScheduler.Add(feed_task);
}
/////////////////////////////////////////////////

// Queue indexing of the KAddressBook contacts file at delayed priority,
// unless a "KAddressBook" task is already queued or running.
private void Index()
{
    if (ThisScheduler.ContainsByTag("KAddressBook")) {
        Logger.Log.Debug("Not adding task for already running Kabc task");
        return;
    }

    AddressIndexableGenerator address_generator = new AddressIndexableGenerator(this, kabc_file, last_modified_table, false);
    Scheduler.Task address_task = NewAddTask(address_generator);
    address_task.Tag = "KAddressBook";
    address_task.Priority = Scheduler.Priority.Delayed;
    address_task.SubPriority = 0;
    ThisScheduler.Add(address_task);
}
// Queue immediate indexing of one file, skipping temporary ".new" files
// and files that cannot be converted to an indexable.
void IndexSingleFile(string path)
{
    // Ignore in-progress temp files.
    if (path.EndsWith(".new"))
        return;

    Indexable indexable = FileToIndexable(path, false);
    if (indexable == null)
        return;

    Scheduler.Task add_task = NewAddTask(indexable);
    add_task.Priority = Scheduler.Priority.Immediate;
    add_task.Tag = path;
    add_task.SubPriority = 0;
    ThisScheduler.Add(add_task);
}
// Index every message in the given file: one delayed-priority add-task per
// message, all grouped under a marking task group keyed on the file's
// path and last-write time.
public void Index(FileInfo file)
{
    Scheduler.TaskGroup marking_group = NewMarkingTaskGroup(file.FullName, file.LastWriteTime);

    MessageReader reader = new MessageReader(file.FullName);

    while (reader.HasMoreMessages) {
        Message message = reader.NextMessage;

        Scheduler.Task message_task = NewAddTask(MessageToIndexable(message));
        message_task.Priority = Scheduler.Priority.Delayed;
        message_task.SubPriority = 0;
        message_task.AddTaskGroup(marking_group);
        ThisScheduler.Add(message_task);
    }
}
/////////////////////////////////////////////////

// Queue indexing of the KOrganizer entries file at delayed priority,
// unless a "KOrganizer" task is already queued or running.
private void Index()
{
    if (ThisScheduler.ContainsByTag("KOrganizer")) {
        Logger.Log.Debug("Not adding task for already running KOrganizer task");
        return;
    }

    // Then add the entries from the KOrganizer file
    EntriesIndexableGenerator entries_generator = new EntriesIndexableGenerator(this, korganizer_file, last_modified_table, false);
    Scheduler.Task entries_task = NewAddTask(entries_generator);
    entries_task.Tag = "KOrganizer";
    // Make sure add task gets scheduled after delete task
    entries_task.Priority = Scheduler.Priority.Delayed;
    entries_task.SubPriority = 0;
    ThisScheduler.Add(entries_task);
}
// Queue indexing of one IM log file at the given priority, skipping
// files that are missing or already up to date in the index.
private void IndexLog(string filename, Scheduler.Priority priority)
{
    if (!File.Exists(filename))
        return;

    if (IsUpToDate(filename))
        return;

    Scheduler.Task log_task = NewAddTask(ImLogToIndexable(filename));
    log_task.Priority = priority;
    log_task.SubPriority = 0;
    ThisScheduler.Add(log_task);
}
// Queue indexing of one note file at the given priority, skipping
// files that are missing or already up to date in the index.
private void IndexNote(FileInfo file, Scheduler.Priority priority)
{
    if (!File.Exists(file.FullName))
        return;

    if (IsUpToDate(file.FullName))
        return;

    Scheduler.Task note_task = NewAddTask(NoteToIndexable(file));
    note_task.Priority = priority;
    note_task.SubPriority = 0;
    ThisScheduler.Add(note_task);
}
/////////////////////////////////////////////////

// Parse and index a single feed. Only .xml files are considered, and a
// feed whose filename is already tagged in the scheduler is skipped.
private void IndexSingleFeed(string filename, bool initial_scan)
{
    if (!filename.EndsWith(".xml"))
        return;

    if (ThisScheduler.ContainsByTag(filename)) {
        Log.Debug("Not adding task for already running task: {0}", filename);
        return;
    }

    FeedIndexableGenerator feed_generator = new FeedIndexableGenerator(this, filename, initial_scan);
    Scheduler.Task feed_task = NewAddTask(feed_generator);
    feed_task.Tag = filename;
    ThisScheduler.Add(feed_task);
}
/**
 * called by Start(), starts actual work
 * create indexers
 * ask indexers to crawl the mails
 * for non-inotify case, ask to poll
 */
private void StartWorker()
{
    Log.Debug("Starting KMail backend");

    Stopwatch timer = new Stopwatch();
    timer.Start();

    // check if there is at all anything to crawl
    if (local_path == null && (!Directory.Exists(dimap_path))) {
        GLib.Timeout.Add(60000, new GLib.TimeoutHandler(CheckForExistence));
        Log.Debug("KMail directories (local mail) " + dimap_path + " not found, will repoll.");
        return;
    }

    Log.Debug("Starting mail crawl");

    if (local_path != null) {
        local_indexer = new KMailIndexer(this, "local", local_path);
        local_indexer.Crawl();
    }

    // FIXME: parse kmailrc to get dimap account name
    if (Directory.Exists(dimap_path)) {
        dimap_indexer = new KMailIndexer(this, "dimap", dimap_path);
        dimap_indexer.Crawl();
    }

    Log.Debug("Mail crawl done");

    if (!Inotify.Enabled) {
        // No inotify: poll the maildirs on a timer instead.
        Scheduler.Task poll_task = Scheduler.TaskFromHook(new Scheduler.TaskHook(CrawlHook));
        poll_task.Tag = "Crawling Maildir directories";
        poll_task.Source = this;
        poll_task.TriggerTime = DateTime.Now.AddSeconds(polling_interval_in_seconds);
        ThisScheduler.Add(poll_task);
    }

    timer.Stop();
    Log.Debug("KMail driver worker thread done in {0}", timer);
}
// Queue indexing of an Evolution mbox file and register its generator.
// The mbox path is used as the task tag to avoid duplicate tasks.
public void IndexMbox(FileInfo mboxInfo, bool inotify_event)
{
    // If there's already a task running for this mbox,
    // don't interrupt it.
    if (ThisScheduler.ContainsByTag(mboxInfo.FullName)) {
        Logger.Log.Debug("Not adding task for already running task: {0}", mboxInfo.FullName);
        return;
    }

    Logger.Log.Debug("Will index mbox {0}", mboxInfo.FullName);

    EvolutionMailIndexableGeneratorMbox mbox_generator = new EvolutionMailIndexableGeneratorMbox(this, mboxInfo);
    Scheduler.Task mbox_task = NewAddTask(mbox_generator);
    mbox_task.Tag = mboxInfo.FullName;
    ThisScheduler.Add(mbox_task);

    AddGenerator(mbox_generator, inotify_event);
}