private void StartWorker() {
    if (!Directory.Exists(log_dir)) {
        GLib.Timeout.Add(300000, new GLib.TimeoutHandler(CheckForExistence));
        return;
    }

    Log.Info("Starting Konversation backend; using log files from {0}", log_dir);

    session_offset_table = new Dictionary<string, long>();

    if (Inotify.Enabled) {
        Inotify.Subscribe(log_dir, OnInotify, Inotify.EventType.Create | Inotify.EventType.Modify);
    }

    initial_log_files = new ArrayList(Directory.GetFiles(log_dir));
    Log.Debug("Konversation backend: found {0} log files", initial_log_files.Count);

    IsIndexing = true;
    LogIndexableGenerator generator = new LogIndexableGenerator(this, log_dir);
    Scheduler.Task task = NewAddTask(generator);
    task.Tag = log_dir;
    task.Source = this;
    ThisScheduler.Add(task);
}
/////////////////////////////////////////////////

private void StartWorker() {
    if (!Directory.Exists(log_dir)) {
        GLib.Timeout.Add(60000, new GLib.TimeoutHandler(CheckForExistence));
        return;
    }

    Logger.Log.Info("Starting Empathy log backend");

    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();

    if (Inotify.Enabled) {
        Watch(log_dir);
    }

    crawler = new EmpathyCrawler(log_dir);
    Crawl();

    if (!Inotify.Enabled) {
        Scheduler.Task task = Scheduler.TaskFromHook(new Scheduler.TaskHook(CrawlHook));
        task.Tag = "Crawling ~/.gnome2/Empathy to find new logfiles";
        task.Source = this;
        ThisScheduler.Add(task);
    }

    stopwatch.Stop();
    Logger.Log.Info("Empathy log backend worker thread done in {0}", stopwatch);
}
private void ScheduleOptimize() {
    double optimize_delay;

    // Really we only want to optimize at most once a day, even if we
    // have indexed a ton of data.
    TimeSpan span = DateTime.Now - last_optimize_time;
    if (span.TotalDays > 1.0) {
        optimize_delay = 10.0; // minutes
    } else {
        optimize_delay = (new TimeSpan(TimeSpan.TicksPerDay) - span).TotalMinutes;
    }

    if (our_optimize_task == null) {
        our_optimize_task = NewOptimizeTask();
    }

    if (OptimizeRightAway || Environment.GetEnvironmentVariable("BEAGREP_UNDER_BLUDGEON") != null) {
        optimize_delay = 1 / 120.0; // half a second
    }

    // Changing the trigger time of an already-scheduled task
    // does what you would expect.
    our_optimize_task.TriggerTime = DateTime.Now.AddMinutes(optimize_delay);

    // Adding the same task more than once is a harmless no-op.
    ThisScheduler.Add(our_optimize_task);
}
/////////////////////////////////////////////////

private void AddCrawlTask() {
    Scheduler.Task task = Scheduler.TaskFromHook(new Scheduler.TaskHook(CrawlHook));
    task.Tag = String.Format("Crawling {0} to find new logfiles", pidgin_dir);
    task.Source = this;
    queryable.ThisScheduler.Add(task);
}
private void StartWorker() {
    if (!Directory.Exists(konq_cache_dir)) {
        // If the directory is not present, the user is not running KDE,
        // so there is no need to check periodically.
        //GLib.Timeout.Add (60000, new GLib.TimeoutHandler (CheckForExistence));
        return;
    }

    if (Inotify.Enabled) {
        // Watch konq_cache_dir for new directory creations
        Inotify.EventType mask = Inotify.EventType.Create;
        Inotify.Subscribe(konq_cache_dir, OnInotifyEvent, mask);
    } else {
        Scheduler.Task crawl_task = Scheduler.TaskFromHook(new Scheduler.TaskHook(CrawlHook));
        crawl_task.Tag = "Crawling konqueror webcache";
        crawl_task.Source = this;
        ThisScheduler.Add(crawl_task);
    }

    Log.Info("Starting Konq history backend ...");
    Crawl();
}
private void Crawl() {
    directory_enumerator = DirectoryWalker.GetDirectoryInfos(konq_cache_dir).GetEnumerator();

    Scheduler.Task crawl_task = NewAddTask(this);
    crawl_task.Tag = crawler_tag;
    ThisScheduler.Add(crawl_task);
}
private void OnInotify(Inotify.Watch watch, string path, string subitem, string srcpath, Inotify.EventType type) {
    long offset = 0;
    path = Path.Combine(path, subitem);

    if (ThisScheduler.ContainsByTag(path)) {
        Log.Debug("Not adding a task; already running: {0}", path);
        return;
    }

    lock (initial_log_files) {
        if (initial_log_files.Contains(path)) {
            Log.Debug("{0} is already scheduled for initial indexing", path);
            return;
        }
    }

    if (session_offset_table.ContainsKey(path)) {
        offset = session_offset_table[path];
    }

    SessionIndexableGenerator generator = new SessionIndexableGenerator(this, path, offset);
    Scheduler.Task task = NewAddTask(generator);
    task.Tag = path;
    task.Source = this;
    ThisScheduler.Add(task);
}
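// Sketch (assumed; not in the original source): the write-back half of
// session_offset_table. OnInotify above reads a saved offset so a modified
// log is re-indexed from where the last pass stopped; the generator would
// record the position it reached with something along these lines.
internal void RememberSessionOffset(string log_path, long end_offset) {
    // "end_offset" is the byte position the generator stopped reading at;
    // the next Modify event for log_path resumes from here.
    session_offset_table[log_path] = end_offset;
}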
private void RemoveNote(string file) {
    Uri uri = Note.BuildNoteUri(file, "tomboy");
    Scheduler.Task task = NewRemoveTask(uri);
    task.Priority = Scheduler.Priority.Immediate;
    task.SubPriority = 0;
    ThisScheduler.Add(task);
}
/////////////////////////////////////////////

int IndexArchive(FileInfo file, Scheduler.Priority priority) {
    if (this.FileAttributesStore.IsUpToDate(file.FullName)) {
        return -1;
    }

    log.Debug("Scanning Monodoc source file " + file);

    Scheduler.TaskGroup group = NewMarkingTaskGroup(file.FullName, file.LastWriteTime);

    int countTypes = 0;
    ZipFile archive = new ZipFile(file.ToString());

    foreach (ZipEntry entry in archive) {
        if (entry.Name.IndexOf(".") != -1) {
            continue;
        }

        XmlDocument document = new XmlDocument();
        document.Load(archive.GetInputStream(entry));

        XmlNode type = document.SelectSingleNode("/Type");
        if (type == null) {
            continue;
        }

        Indexable typeIndexable = TypeNodeToIndexable(type, file);

        Scheduler.Task typeTask = NewAddTask(typeIndexable);
        typeTask.Priority = priority;
        typeTask.SubPriority = 0;
        typeTask.AddTaskGroup(group);
        ThisScheduler.Add(typeTask);

        foreach (XmlNode member in type.SelectNodes("Members/Member")) {
            Indexable memberIndexable = MemberNodeToIndexable(member,
                                                              file,
                                                              type.Attributes["FullName"].Value);

            Scheduler.Task memberTask = NewAddTask(memberIndexable);
            memberTask.Priority = priority;
            memberTask.SubPriority = 0;
            memberTask.AddTaskGroup(group);
            ThisScheduler.Add(memberTask);
        }

        countTypes++;
    }

    return countTypes;
}
private void CrawlHook(Scheduler.Task task) {
    if (!ThisScheduler.ContainsByTag(crawler_tag)) {
        Crawl();
    }

    task.Reschedule = true;
    task.TriggerTime = DateTime.Now.AddSeconds(polling_interval_in_seconds);
}
private void RemoveBookmark(string uid) {
    Uri uri = new Uri(uid);
    Log.Debug("Removing bookmark {0}", uri);
    Scheduler.Task task = NewRemoveTask(uri);
    task.Priority = Scheduler.Priority.Immediate;
    task.SubPriority = 0;
    ThisScheduler.Add(task);
}
private void RemoveEntry(string uid) {
    Uri uri = new Uri(String.Format("korganizer:///{0}", uid));
    Logger.Log.Debug("Removing entry {0}", uri);
    Scheduler.Task task = NewRemoveTask(uri);
    task.Priority = Scheduler.Priority.Immediate;
    task.SubPriority = 0;
    ThisScheduler.Add(task);
}
private void RemoveContact(string uid) {
    Uri uri = new Uri(String.Format("kabc:///{0}", uid));
    Logger.Log.Debug("Removing contact {0}", uri);
    Scheduler.Task task = NewRemoveTask(uri);
    task.Priority = Scheduler.Priority.Immediate;
    task.SubPriority = 0;
    ThisScheduler.Add(task);
}
private void RemoveFeedFile(string file) {
    Log.Debug("Removing Akregator feedfile: " + file);

    Uri uri = UriFu.PathToFileUri(file);
    Scheduler.Task task = NewRemoveTask(uri);
    task.Priority = Scheduler.Priority.Immediate;
    task.SubPriority = 0;
    ThisScheduler.Add(task);
}
/**
 * Deleting an mbox means deleting all of the mails that were in it.
 * We use the notion of a parent uri: while creating indexables, we set
 * each mail's parent uri to the uri of its mbox file. So to delete all
 * mails in an mbox, we just delete every document whose parent uri is
 * the uri of that mbox file.
 */
public void RemoveMbox(string file) {
    Logger.Log.Debug("Removing mbox: " + file);

    Uri uri = UriFu.PathToFileUri(file);
    Scheduler.Task task = queryable.NewRemoveTask(uri);
    task.Priority = Scheduler.Priority.Immediate;
    task.SubPriority = 0;
    queryable.ThisScheduler.Add(task);
}
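// Sketch (assumed; not part of the original backend): the creation side of
// the parent-uri idea described above. "mail_uri" and "mbox_file" are
// hypothetical names; Indexable.ParentUri is the property the
// remove-by-parent cascade keys on.
private Indexable MailToIndexableSketch(Uri mail_uri, string mbox_file) {
    Indexable indexable = new Indexable(mail_uri);
    // Removing the mbox uri (as RemoveMbox above does) then cascades to
    // every document whose ParentUri matches it.
    indexable.ParentUri = UriFu.PathToFileUri(mbox_file);
    return indexable;
}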
private void AddIIndexableTask(IIndexableGenerator generator, string tag) {
    if (generator == null) {
        return;
    }

    Scheduler.Task task = queryable.NewAddTask(generator);
    task.Tag = tag;
    queryable.ThisScheduler.Add(task);
}
////////////////////////////////////////////////////////////////

static private void SynchronizeHook(Scheduler.Task task) {
    try {
        Synchronize(SynchronizationTarget.Remote);
    } catch (Exception ex) {
        Logger.Log.Error(ex, "Caught exception while synchronizing");
    }

    task.Reschedule = true;
    task.TriggerTime = DateTime.Now.AddMinutes(sync_interval_in_minutes);
}
private void AddIndexableTask(Indexable indexable, string tag) {
    if (indexable == null) {
        return;
    }

    Scheduler.Task task = queryable.NewAddTask(indexable);
    task.Priority = Scheduler.Priority.Immediate;
    task.Tag = tag;
    queryable.ThisScheduler.Add(task);
}
private void LaunchIndexable() {
    // Cancel running task before adding a new one
    CancelIndexable();

    // Add the new indexable generator
    indexable_generator = new ThunderbirdIndexableGenerator(this, queryable.ToIndexDirectory);
    Scheduler.Task task = queryable.NewAddTask(indexable_generator);
    task.Tag = TAG;
    queryable.ThisScheduler.Add(task);
}
static private void ShutdownHook(Scheduler.Task task) {
    try {
        Synchronize(SynchronizationTarget.Remote);

        // FIXME: This may not be safe to do here
        Logger.Log.Debug("Purging locally synchronized indexes");
        Directory.Delete(local_index_dir, true);
    } catch (Exception ex) {
        Logger.Log.Error(ex, "Caught exception while doing shutdown synchronization");
    }
}
private void AddIIndexableTask(IIndexableGenerator generator, string tag) {
    if (queryable.ThisScheduler.ContainsByTag(tag)) {
        Logger.Log.Debug("Not adding a task; already running: {0}", tag);
        return;
    }

    Scheduler.Task task = queryable.NewAddTask(generator);
    task.Tag = tag;
    queryable.ThisScheduler.Add(task);
}
public void ScheduleRemoval(Uri[] uris, string tag, Scheduler.Priority priority) {
    if (queryable.ThisScheduler.ContainsByTag(tag)) {
        Logger.Log.Debug("Not adding a task; already running: {0}", tag);
        return;
    }

    Scheduler.Task task = queryable.NewAddTask(new UriRemovalIndexableGenerator(uris));
    task.Tag = tag; // without this, the ContainsByTag check above can never match
    task.Priority = priority;
    task.SubPriority = 0;
    queryable.ThisScheduler.Add(task);
}
/**
 * For the non-inotify case, this method is invoked repeatedly.
 */
private void CrawlHook(Scheduler.Task task) {
    if (local_indexer != null) {
        local_indexer.Crawl();
    }

    if (dimap_indexer != null) {
        dimap_indexer.Crawl();
    }

    task.Reschedule = true;
    task.TriggerTime = DateTime.Now.AddSeconds(polling_interval_in_seconds);
}
private void IndexFile(FileInfo data_file) {
    Indexable indexable = FileToIndexable(data_file);

    if (indexable == null) { // The file disappeared
        return;
    }

    Scheduler.Task task = NewAddTask(indexable);
    task.Priority = Scheduler.Priority.Immediate;
    ThisScheduler.Add(task);
}
private void ScheduleRemoval(Property prop, Scheduler.Priority priority) {
    if (queryable.ThisScheduler.ContainsByTag(prop.ToString())) {
        Logger.Log.Debug("Not adding a task; already running: {0}", prop.ToString());
        return;
    }

    Scheduler.Task task = queryable.NewRemoveByPropertyTask(prop);
    task.Priority = priority;
    task.SubPriority = 0;
    queryable.ThisScheduler.Add(task);
}
private void ScheduleFinalFlush() {
    if (our_final_flush_task == null) {
        our_final_flush_task = new FinalFlushTask(this);

        our_final_flush_task.Tag = "Final Flush for " + IndexName;
        our_final_flush_task.Priority = Scheduler.Priority.Maintenance;
        our_final_flush_task.SubPriority = 100; // do this first when starting maintenance
        our_final_flush_task.Source = this;
    }

    ThisScheduler.Add(our_final_flush_task);
}
private ResponseMessage HandleMessage(RequestMessage msg) {
    IndexingServiceRequest isr = (IndexingServiceRequest)msg;
    LuceneQueryable backend = this;

    if (isr.Source != null) {
        Queryable target = QueryDriver.GetQueryable(isr.Source);

        if (target == null) {
            string err = String.Format("Unable to find backend matching '{0}'", isr.Source);
            Log.Error(err);
            return new ErrorResponse(err);
        }

        if (!(target.IQueryable is LuceneQueryable)) {
            string err = String.Format("Backend '{0}' is not an indexed backend", isr.Source);
            Log.Error(err);
            return new ErrorResponse(err);
        }

        backend = (LuceneQueryable)target.IQueryable;
        Log.Debug("Found backend for IndexingServiceRequest: {0}", backend.IndexName);
    }

    // FIXME: There should be a way for the request to control the
    // scheduler priority of the task.

    if (isr.ToAdd.Count > 0 || isr.ToRemove.Count > 0) {
        Log.Debug("IndexingService: Adding {0} indexables, removing {1} indexables.",
                  isr.ToAdd.Count, isr.ToRemove.Count);

        IndexableGenerator ind_gen = new IndexableGenerator(isr.ToAdd, isr.ToRemove, this);
        Scheduler.Task task = backend.NewAddTask(ind_gen);
        task.Priority = Scheduler.Priority.Immediate;
        ThisScheduler.Add(task);
    }

    // FIXME: There should be an asynchronous response (fired by a Scheduler.Hook)
    // that fires when all of the items have been added to the index.

    // No response
    return new EmptyResponse();
}
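// Sketch for the FIXME above (assumed API shapes; not part of the original
// file): a task group's post-hook runs once every task in the group has
// completed, which is one way to fire the asynchronous "all items indexed"
// response the comment asks for. Method names here are hypothetical.
private void ScheduleWithCompletionHook(LuceneQueryable backend, IndexableGenerator ind_gen) {
    Scheduler.TaskGroup group = Scheduler.NewTaskGroup(
        "IndexingService response",
        null,                                    // no pre-hook needed
        new Scheduler.Hook(OnIndexingComplete)); // runs after the last task in the group

    Scheduler.Task task = backend.NewAddTask(ind_gen);
    task.Priority = Scheduler.Priority.Immediate;
    task.AddTaskGroup(group);
    ThisScheduler.Add(task);
}

private void OnIndexingComplete() {
    Log.Debug("IndexingService: all items have been added to the index");
    // A real implementation would send the response message from here.
}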
public void RemoveFolder(string folderFile) {
    if (queryable.ThisScheduler.ContainsByTag(folderFile)) {
        Logger.Log.Debug("Not adding a task; already running: {0}", folderFile);
        return;
    }

    Property prop = Property.NewUnsearched("ParentUri", folderFile);
    Scheduler.Task task = queryable.NewRemoveByPropertyTask(prop);
    task.Tag = folderFile;
    task.Priority = Scheduler.Priority.Immediate;
    queryable.ThisScheduler.Add(task);
}
void IndexSingleFile(string path) {
    if (path.EndsWith(".new")) {
        return;
    }

    Indexable indexable = FileToIndexable(path, false);
    if (indexable == null) {
        return;
    }

    Scheduler.Task task = NewAddTask(indexable);
    task.Priority = Scheduler.Priority.Immediate;
    task.Tag = path;
    task.SubPriority = 0;
    ThisScheduler.Add(task);
}
public void Index(FileInfo file) {
    Scheduler.TaskGroup group = NewMarkingTaskGroup(file.FullName, file.LastWriteTime);

    MessageReader reader = new MessageReader(file.FullName);

    while (reader.HasMoreMessages) {
        Message message = reader.NextMessage;
        Indexable indexable = MessageToIndexable(message);

        Scheduler.Task task = NewAddTask(indexable);
        task.Priority = Scheduler.Priority.Delayed;
        task.SubPriority = 0;
        task.AddTaskGroup(group);
        ThisScheduler.Add(task);
    }
}
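// Note (assumed semantics, based on how NewMarkingTaskGroup is used here and
// in IndexArchive above): the marking task group's post-hook stamps
// file.FullName with file.LastWriteTime in the FileAttributesStore only once
// every task in the group has completed, so an interrupted run leaves the
// file looking out of date and it is re-indexed on the next pass.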