// Start the Konversation backend: poll for the log directory if it does not
// exist yet, otherwise watch it and schedule the initial indexing pass.
private void StartWorker()
{
    if (!Directory.Exists(log_dir)) {
        // Re-check every 5 minutes until the log directory appears.
        GLib.Timeout.Add(300000, new GLib.TimeoutHandler(CheckForExistence));
        return;
    }

    Log.Info("Starting konversation backend; using log files from {0}", log_dir);

    session_offset_table = new Dictionary<string, long>();

    if (Inotify.Enabled) {
        Inotify.Subscribe(log_dir, OnInotify,
                          Inotify.EventType.Create | Inotify.EventType.Modify);
    }

    initial_log_files = new ArrayList(Directory.GetFiles(log_dir));
    Log.Debug("Konversation backend: found {0} log files", initial_log_files.Count);

    IsIndexing = true;

    // Hand the existing logs off to the scheduler for indexing.
    LogIndexableGenerator log_generator = new LogIndexableGenerator(this, log_dir);
    Scheduler.Task add_task = NewAddTask(log_generator);
    add_task.Tag = log_dir;
    add_task.Source = this;
    ThisScheduler.Add(add_task);
}
/////////////////////////////////////////////////
// Sets up an Inotify watch on all subdirectories
private void Watch(string path)
{
    DirectoryInfo top = new DirectoryInfo(path);
    if (!top.Exists)
        return;

    // Breadth-first traversal of the directory tree.
    Queue pending = new Queue();
    pending.Enqueue(top);

    while (pending.Count > 0) {
        DirectoryInfo current = pending.Dequeue() as DirectoryInfo;

        // Setup watches on the present directory.
        Inotify.Subscribe(current.FullName, OnInotifyEvent,
                          Inotify.EventType.Create | Inotify.EventType.CloseWrite);

        // Add all subdirectories to the queue so their files can be indexed.
        foreach (DirectoryInfo child in current.GetDirectories())
            pending.Enqueue(child);
    }
}
// Start the Konqueror history backend: watch the webcache root for new
// directories (or schedule a crawl task without inotify) and do an initial
// crawl.
private void StartWorker()
{
    if (!Directory.Exists(konq_cache_dir)) {
        // if the directory is not present, user is not running KDE
        // no need to periodically check
        //GLib.Timeout.Add (60000, new GLib.TimeoutHandler (CheckForExistence));
        return;
    }

    if (Inotify.Enabled) {
        // watch konq_cache_dir for new directory creations
        Inotify.Subscribe(konq_cache_dir, OnInotifyEvent, Inotify.EventType.Create);
    } else {
        // No inotify available: fall back to a periodic crawl task.
        Scheduler.Task fallback_crawl =
            Scheduler.TaskFromHook(new Scheduler.TaskHook(CrawlHook));
        fallback_crawl.Tag = "Crawling konqueror webcache";
        fallback_crawl.Source = this;
        ThisScheduler.Add(fallback_crawl);
    }

    Log.Info("Starting Konq history backend ...");
    Crawl();
}
// Advance to the next cache file that needs indexing and return true, or
// return false when every directory has been exhausted.
// Walks directories with directory_enumerator and the files inside each
// directory with file_enumerator, leaving the chosen file in current_file.
// KDE4 "_freq" bookkeeping files and files that are already up to date are
// skipped.
public bool HasNextIndexable()
{
    do {
        // The current directory's files are exhausted (or we have no file
        // enumerator yet): move on to the next directory.
        while (file_enumerator == null || !file_enumerator.MoveNext()) {
            if (!directory_enumerator.MoveNext()) {
                // No directories left: crawl is finished.
                Logger.Log.Debug("KonqQ: Crawling done");
                file_enumerator = null;
                current_file = null;
                return(false);
            }
            DirectoryInfo current_dir = (DirectoryInfo)directory_enumerator.Current;
            //Logger.Log.Debug ("Trying dir:" + current_dir.Name);
            // start watching for new files and get the list of current files
            // kind of race here - might get duplicate files
            if (Inotify.Enabled) {
                Inotify.Subscribe(current_dir.FullName, OnInotifyEvent,
                                  Inotify.EventType.Create | Inotify.EventType.MovedTo);
            }
            file_enumerator = DirectoryWalker.GetFileInfos(current_dir).GetEnumerator();
        }
        current_file = (FileInfo)file_enumerator.Current;
        //if (!IsUpToDate (current_file.FullName))
        //      Logger.Log.Debug (current_file.FullName + " is not upto date");
        // KDE4 cache contains _freq files which are non-cache files
    } while (current_file.FullName.EndsWith("_freq") || IsUpToDate(current_file.FullName));
    return(true);
}
/////////////////////////////////////////////////

// Start the Labyrinth backend: poll for the notes directory if absent,
// otherwise watch it for note changes and schedule the initial crawl.
private void StartWorker()
{
    if (!Directory.Exists(lab_dir)) {
        // Re-check every minute until the directory shows up.
        GLib.Timeout.Add(60000, new GLib.TimeoutHandler(CheckForExistence));
        return;
    }

    Log.Info("Starting Labyrinth backend");

    Stopwatch timer = new Stopwatch();
    timer.Start();

    if (Inotify.Enabled) {
        Inotify.Subscribe(lab_dir, OnInotifyNewNote,
                          Inotify.EventType.CloseWrite | Inotify.EventType.Modify);
    }

    Scheduler.Task crawl_task = NewAddTask(this);
    crawl_task.Tag = "Crawling Labyrinth Notes";
    crawl_task.Source = this;
    ThisScheduler.Add(crawl_task);

    timer.Stop();
    Log.Info("labyrinth backend worker thread done in {0}", timer);
}
/**
 * Add watch to the parameter directory and its subdirs, recursively
 */
public void Watch(string path)
{
    DirectoryInfo top = new DirectoryInfo(path);
    if (!top.Exists)
        return;

    Queue pending = new Queue();
    pending.Enqueue(top);

    while (pending.Count > 0) {
        DirectoryInfo current = pending.Dequeue() as DirectoryInfo;

        // A directory may vanish between being queued and being processed.
        if (!current.Exists)
            continue;

        //log.Debug ("Adding inotify watch to " + dir.FullName);
        Inotify.Subscribe(current.FullName, OnInotifyEvent,
                          Inotify.EventType.Create | Inotify.EventType.Delete |
                          Inotify.EventType.MovedFrom | Inotify.EventType.MovedTo);

        foreach (DirectoryInfo child in DirectoryWalker.GetDirectoryInfos(current))
            pending.Enqueue(child);
    }
}
// Start the Blam backend: poll for the config directory if absent, watch the
// feed file (via inotify or FileSystemWatcher) and index it if present.
private void StartWorker()
{
    if (!Directory.Exists(blam_dir)) {
        GLib.Timeout.Add(60000, new GLib.TimeoutHandler(CheckForExistence));
        return;
    }

    if (Inotify.Enabled) {
        // Reindex whenever the feed file is finished being written.
        Inotify.Subscribe(blam_dir, OnInotifyEvent, Inotify.EventType.CloseWrite);
    } else {
        // Fall back to FileSystemWatcher when inotify is unavailable.
        FileSystemWatcher watcher = new FileSystemWatcher();
        watcher.Path = blam_dir;
        watcher.Filter = blam_file.Name;
        watcher.Changed += new FileSystemEventHandler(OnChangedEvent);
        watcher.Created += new FileSystemEventHandler(OnChangedEvent);
        watcher.EnableRaisingEvents = true;
    }

    if (File.Exists(blam_file.FullName))
        Index();
}
// Inotify callback: when the Lsongs database file itself changes, reload
// the track list and commit the daemon's state.
private void OnLsongsChanged(Inotify.Watch watch, string path, string subitem, string srcpath, Inotify.EventType type)
{
    string changed = Path.Combine(path, subitem);

    if (changed != dbpath)
        return;

    RefreshTracks();
    Daemon.Server.Commit();
}
// Release one reference to the application; when the last reference goes
// away, stop the inotify threads and quit the Gtk main loop.
private void Quit()
{
    // FIX: removed a leftover debug Console.WriteLine(ref_count) that
    // printed the raw reference count to stdout on every call.
    if (--ref_count >= 1)
        return;

    Inotify.Stop();
    Gtk.Application.Quit();
}
// Watch the to-index directory for additions (and for its own deletion),
// then launch the indexable generator.
public void Start()
{
    // Make sure we catch file system changes
    Inotify.Subscribe(queryable.ToIndexDirectory, OnInotifyEvent,
                      Inotify.EventType.Create | Inotify.EventType.DeleteSelf);

    // Start the indexable generator and begin adding things to the index
    LaunchIndexable();
}
// Start the backend: call the base implementation, watch the index
// directory for creation/deletion events (including deletion of the
// directory itself), then run StartWorker on an exception-handling thread.
public override void Start()
{
    base.Start();
    // delay everything till the backend is actually started
    Inotify.Subscribe(IndexDirectory, OnInotifyEvent,
                      Inotify.EventType.Create | Inotify.EventType.Delete | Inotify.EventType.DeleteSelf);
    ExceptionHandlingThread.Start(new ThreadStart(StartWorker));
}
// Watch a protocol directory for newly created accounts and crawl every
// account directory already present.
private void CrawlProtocolDirectory(string proto_dir, bool index)
{
    if (Inotify.Enabled)
        Inotify.Subscribe(proto_dir, OnInotifyNewAccount, Inotify.EventType.Create);

    // Walk through accounts
    foreach (string account_dir in DirectoryWalker.GetDirectories(proto_dir))
        CrawlAccountDirectory(account_dir, index);
}
// Watch the log root for newly created protocol directories and crawl every
// protocol directory already present.
private void Crawl(bool index)
{
    //queryable.IsIndexing = true;
    if (Inotify.Enabled)
        Inotify.Subscribe(logs_dir, OnInotifyNewProtocol, Inotify.EventType.Create);

    // Walk through protocol subdirs
    foreach (string proto_dir in DirectoryWalker.GetDirectories(logs_dir))
        CrawlProtocolDirectory(proto_dir, index);
}
// Watch an account directory for newly created remote-contact directories
// and crawl each existing one, skipping internal ".system" directories.
private void CrawlAccountDirectory(string account_dir, bool index)
{
    if (Inotify.Enabled)
        Inotify.Subscribe(account_dir, OnInotifyNewRemote, Inotify.EventType.Create);

    // Walk through remote user conversations
    foreach (string remote_dir in DirectoryWalker.GetDirectories(account_dir)) {
        // Idiom: Contains() instead of IndexOf(...) < 0.
        if (!remote_dir.Contains(".system"))
            CrawlRemoteDirectory(remote_dir, index);
    }
}
// Start the Akregator backend: poll for the archive directory if absent,
// watch it for changes, then scan and index every *.xml feed archive.
private void StartWorker()
{
    if (!Directory.Exists(akregator_dir)) {
        GLib.Timeout.Add(60000, new GLib.TimeoutHandler(CheckForExistence));
        return;
    }

    if (Inotify.Enabled) {
        Inotify.Subscribe(akregator_dir, OnInotifyEvent,
                          Inotify.EventType.CloseWrite | Inotify.EventType.Delete);
    } else {
        // Fall back to FileSystemWatcher when inotify is unavailable.
        FileSystemWatcher watcher = new FileSystemWatcher();
        watcher.Path = akregator_dir;
        watcher.Changed += new FileSystemEventHandler(OnChanged);
        watcher.Created += new FileSystemEventHandler(OnChanged);
        watcher.EnableRaisingEvents = true;
    }

    Log.Info("Scanning Akregator feeds...");

    Stopwatch timer = new Stopwatch();
    timer.Start();

    int parsed = 0;
    DirectoryInfo archive_dir = new DirectoryInfo(akregator_dir);
    foreach (FileInfo feed_file in DirectoryWalker.GetFileInfos(archive_dir)) {
        if (feed_file.Extension != ".xml")
            continue;
        IndexSingleFeed(feed_file.FullName, true);
        parsed++;
    }

    timer.Stop();
    Log.Info("{0} files will be parsed (scanned in {1})", parsed, timer);
}
/////////////////////////////////////////////////////////////////////////////

// Shut the daemon down: dispose the Avahi zeroconf service (when built with
// ENABLE_AVAHI), stop the inotify threads, stop the global scheduler, and
// stop the messaging server if one was started.
private static void OnShutdown()
{
#if ENABLE_AVAHI
    zeroconf.Dispose();
#endif
    // Stop our Inotify threads
    Inotify.Stop();

    // Stop the global scheduler and ask it to shutdown
    Scheduler.Global.Stop(true);

    // Stop the messaging server
    if (server != null) {
        server.Stop();
    }
}
// Construct the Opera cache indexer: record the queryable and attribute
// store, then locate every cache directory under root_dir and watch it.
public OperaIndexer(OperaQueryable queryable, FileAttributesStore store, string root_dir)
{
    this.attribute_store = store;
    this.queryable = queryable;
    this.cache_dirs = new ArrayList();

    // Try to find all cache dirs
    foreach (string dir in DirectoryWalker.GetDirectories(root_dir)) {
        // A directory counts as a cache dir if it contains at least one
        // cache file.  FIX: subscribe and record the directory only once;
        // the original re-subscribed and re-added it for EVERY matching
        // file, creating duplicate watches and duplicate cache_dirs entries.
        foreach (string file in DirectoryWalker.GetItems(dir, new DirectoryWalker.FileFilter(IsCacheFile))) {
            Inotify.Subscribe(dir, OnInotify,
                              Inotify.EventType.MovedTo | Inotify.EventType.CloseWrite);
            cache_dirs.Add(dir);
            break;
        }
    }
}
// Install an inotify subscription covering the common file-change events on
// 'path'.  Returns the watch object, or null when the path disappeared
// before the watch could be created.
public object CreateWatch(string path)
{
    try {
        return Inotify.Subscribe(path, inotify_callback,
                                 Inotify.EventType.Create |
                                 Inotify.EventType.Delete |
                                 Inotify.EventType.CloseWrite |
                                 Inotify.EventType.MovedFrom |
                                 Inotify.EventType.MovedTo |
                                 Inotify.EventType.Attrib);
    } catch (IOException) {
        // We can race and files can disappear. No big deal.
        return null;
    }
}
// Start the Liferea backend: poll for the feed cache directory if absent,
// watch it for changes, then scan and index every feed file present.
private void StartWorker()
{
    if (!CheckForDirectory()) {
        Log.Debug("Watching for creation of Liferea directory");
        GLib.Timeout.Add(60000, new GLib.TimeoutHandler(CheckForExistence));
        return;
    }

    if (Inotify.Enabled) {
        Inotify.EventType mask = Inotify.EventType.CloseWrite | Inotify.EventType.Delete;
        Inotify.Subscribe(liferea_dir, OnInotifyEvent, mask);
    } else {
        // Fall back to FileSystemWatcher when inotify is unavailable.
        FileSystemWatcher fsw = new FileSystemWatcher();
        fsw.Path = liferea_dir;
        fsw.Changed += new FileSystemEventHandler(OnChanged);
        fsw.Created += new FileSystemEventHandler(OnChanged);
        fsw.EnableRaisingEvents = true;
    }

    Log.Info("Scanning Liferea feeds...");
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();

    DirectoryInfo dir = new DirectoryInfo(liferea_dir);
    int count = 0;
    foreach (FileInfo file in DirectoryWalker.GetFileInfos(dir)) {
        IndexSingleFeed(file.FullName);
        // FIX: count was never incremented, so the log line below always
        // reported "0 files will be parsed" (compare the Akregator backend).
        count++;
    }

    stopwatch.Stop();
    Log.Info("{0} files will be parsed (scanned in {1})", count, stopwatch);
}
// Scan every Monodoc *.zip archive under monodoc_dir and index the types it
// contains, timing the whole pass.  Also installs a watch (inotify when
// available, FileSystemWatcher otherwise) so archives added or rewritten
// later get picked up.
private void StartWorker()
{
    log.Info("Scanning Monodoc sources");

    Stopwatch timer = new Stopwatch();
    timer.Start();

    int foundSources = 0;
    int foundTypes = 0;
    DirectoryInfo root = new DirectoryInfo(monodoc_dir);

    if (Inotify.Enabled) {
        // Keep the watch descriptor so later events can be matched to it.
        monodoc_wd = Inotify.Watch(root.FullName,
                                   Inotify.EventType.CloseWrite | Inotify.EventType.CreateFile);
        Inotify.Event += OnInotifyEvent;
    } else {
        FileSystemWatcher fsw = new FileSystemWatcher();
        fsw.Path = monodoc_dir;
        fsw.Filter = "*.zip";
        fsw.Changed += new FileSystemEventHandler(OnChangedEvent);
        fsw.Created += new FileSystemEventHandler(OnChangedEvent);
        fsw.EnableRaisingEvents = true;
    }

    foreach (FileInfo file in root.GetFiles("*.zip")) {
        // IndexArchive returns the number of types found, or -1 on failure.
        int result = IndexArchive(file, Scheduler.Priority.Delayed);
        if (result != -1) {
            foundSources++;
            foundTypes += result;
        }
    }

    timer.Stop();
    log.Info("Found {0} types in {1} Monodoc sources in {2}", foundTypes, foundSources, timer);
}
// Watch a remote-contact directory for new/changed conversation logs and,
// when 'index' is set, index every interesting log file already present.
private void CrawlRemoteDirectory(string remote_dir, bool index)
{
    if (Inotify.Enabled)
        Inotify.Subscribe(remote_dir, OnInotifyNewConversation,
                          Inotify.EventType.CloseWrite | Inotify.EventType.Modify);

    if (!index)
        return;

    foreach (FileInfo log_file in DirectoryWalker.GetFileInfos(remote_dir)) {
        if (FileIsInteresting(log_file.Name))
            IndexLog(log_file.FullName, Scheduler.Priority.Delayed);
    }
    //queryable.IsIndexing = false;
}
// Start the Konqueror bookmarks backend: poll for the config directory if
// absent, watch the bookmark file for rewrites, and index or scan it.
private void StartWorker()
{
    if (!Directory.Exists(konq_dir)) {
        GLib.Timeout.Add(60000, new GLib.TimeoutHandler(CheckForExistence));
        return;
    }

    if (Inotify.Enabled) {
        Inotify.Subscribe(konq_dir, OnInotifyEvent,
                          Inotify.EventType.CloseWrite | Inotify.EventType.MovedTo);
    } else {
        // Fall back to FileSystemWatcher when inotify is unavailable.
        FileSystemWatcher watcher = new FileSystemWatcher();
        watcher.Path = konq_dir;
        watcher.Filter = bookmark_file;
        watcher.Changed += new FileSystemEventHandler(OnChanged);
        watcher.Created += new FileSystemEventHandler(OnChanged);
        watcher.Renamed += new RenamedEventHandler(OnChanged);
        watcher.EnableRaisingEvents = true;
    }

    if (!File.Exists(bookmark_file))
        return;

    // Reindex only when the stored attributes say the file changed;
    // otherwise just scan it to load the current bookmark state.
    if (!FileAttributesStore.IsUpToDate(bookmark_file))
        Index();
    else
        ScanBookmarkInitial();
}
// Start the Nautilus metadata backend: watch the metadata directory and
// schedule the initial crawl task.
private void StartWorker()
{
    if (Inotify.Enabled) {
        // Nautilus creates a temporary file, writes
        // out the content, and moves it on top of any
        // previous file. Files are never removed. So
        // we only need to watch the MovedTo event.
        Inotify.Subscribe(nautilus_dir, OnInotifyEvent, Inotify.EventType.MovedTo);
    }

    // Start our crawler process
    Scheduler.Task crawl_task = this.target_queryable.NewAddTask(this);
    crawl_task.Tag = "Crawling Nautilus Metadata";
    crawl_task.Source = this;
    ThisScheduler.Add(crawl_task);

    Log.Info("Nautilus metadata backend started");
}
// Start the Tomboy backend: poll for the notes directory if absent, watch
// *.note files for changes, and schedule the initial crawl.
private void StartWorker()
{
    if (!Directory.Exists(tomboy_dir)) {
        GLib.Timeout.Add(60000, new GLib.TimeoutHandler(CheckForExistence));
        return;
    }

    if (Inotify.Enabled) {
        Inotify.Subscribe(tomboy_dir, OnInotifyEvent,
                          Inotify.EventType.Delete |
                          Inotify.EventType.MovedTo |
                          Inotify.EventType.MovedFrom);
    } else {
        // Fall back to FileSystemWatcher when inotify is unavailable.
        FileSystemWatcher watcher = new FileSystemWatcher();
        watcher.Path = tomboy_dir;
        watcher.Filter = "*.note";
        watcher.Changed += new FileSystemEventHandler(OnChanged);
        watcher.Created += new FileSystemEventHandler(OnChanged);
        watcher.Deleted += new FileSystemEventHandler(OnDeleted);
        watcher.EnableRaisingEvents = true;
    }

    // Start our crawler process
    Scheduler.Task crawl_task = NewAddTask(this);
    crawl_task.Tag = "Crawling Tomboy Notes";
    crawl_task.Source = this;
    ThisScheduler.Add(crawl_task);

    Logger.Log.Info("Tomboy backend started");
}
// Start the indexing service: ensure the ToIndex drop directory exists,
// watch it for finished files, and crawl whatever is already waiting there.
private void StartWorker()
{
    string index_path = Path.Combine(PathFinder.StorageDir, "ToIndex");
    if (!Directory.Exists(index_path))
        Directory.CreateDirectory(index_path);

    if (Inotify.Enabled)
        Inotify.Subscribe(index_path, OnInotifyEvent, Inotify.EventType.CloseWrite);

    Logger.Log.Info("Setting up an initial crawl of the IndexingService directory");

    IndexableGenerator initial_crawl = new IndexableGenerator(GetIndexables(index_path));
    Scheduler.Task crawl_task = NewAddTask(initial_crawl);
    crawl_task.Tag = "IndexingService initial crawl";
    ThisScheduler.Add(crawl_task);
}
// Construct the Thunderbird indexer: record the queryable and profile root
// paths, initialize bookkeeping collections and phase flags, load the
// supported message types, and watch every root path for changes.
public ThunderbirdIndexer(ThunderbirdQueryable queryable, string[] root_paths)
{
    this.queryable = queryable;
    this.root_paths = root_paths;
    this.supported_types = new Hashtable();
    // NOTE(review): init_phase/first_lap presumably track the initial
    // indexing pass — confirm against their consumers elsewhere in the file.
    this.init_phase = true;
    this.first_lap = true;
    this.account_list = new ArrayList();
    this.inotify = new ThunderbirdInotify();

    LoadSupportedTypes();

    // Watch each profile root for additions, removals, renames and content
    // changes.
    foreach (string path in root_paths) {
        Inotify.Subscribe(path, OnInotifyEvent,
                          Inotify.EventType.Delete | Inotify.EventType.MovedTo | Inotify.EventType.Modify | Inotify.EventType.Create);
    }

    inotify.InotifyEvent += OnInotifyEvent;
}
// Crawl the specified directory and all subdirectories, indexing all
// discovered launchers. If Inotify is available, every directory
// scanned will be watched.  Returns the number of launcher files indexed.
private int CrawlLaunchers(string path)
{
    DirectoryInfo top = new DirectoryInfo(path);
    if (!top.Exists)
        return(0);

    int launchers_found = 0;

    // Breadth-first traversal of the directory tree.
    Queue pending = new Queue();
    pending.Enqueue(top);

    while (pending.Count > 0) {
        DirectoryInfo current = pending.Dequeue() as DirectoryInfo;

        if (Inotify.Enabled)
            Inotify.Subscribe(current.FullName, OnInotifyEvent,
                              Inotify.EventType.Create | Inotify.EventType.Modify);

        foreach (FileInfo launcher in current.GetFiles()) {
            IndexLauncher(launcher, Scheduler.Priority.Delayed);
            ++launchers_found;
        }

        foreach (DirectoryInfo child in current.GetDirectories())
            pending.Enqueue(child);
    }

    return(launchers_found);
}
// Construct the search front-end.  When 'icon_enabled' is set this also
// builds the search window and tray icon, installs the configured key
// bindings, starts inotify, and subscribes to configuration updates.
// 'docs_enabled' is only recorded here.
public Search(bool icon_enabled, bool docs_enabled)
{
    this.icon_enabled = icon_enabled;
    this.docs_enabled = docs_enabled;

    if (icon_enabled) {
        icon_window = new SearchWindow(this);
        icon_window.QueryEvent += OnQueryEvent;

        tray = new TrayIcon();
        tray.Clicked += OnTrayActivated;
        tray.Search += OnTraySearch;
        tray.Quit += OnTrayQuit;

        Config config = Conf.Get(Conf.Names.BeagleSearchConfig);
        keybinder = new XKeybinder();
        SetKeyBindings(config);

        Inotify.Start();
        Conf.WatchForUpdates();
        // React to later configuration changes (e.g. new key bindings).
        Conf.Subscribe(Conf.Names.BeagleSearchConfig, OnConfigurationChanged);
    }
}
/////////////////////////////////////////////////
// Modified/Created event using Inotify
private void OnInotifyEvent(Inotify.Watch watch, string path, string subitem, string srcpath, Inotify.EventType type)
{
    if (subitem == "")
        return;

    // Watch konq_cache_dir for new directory creation
    // Watch its subdirectories for new file creation
    // If any file in created in konq_cache_dir, ignore it
    // Its a Konq error otherwise
    if ((type & Inotify.EventType.IsDirectory) == 0) {
        IndexSingleFile(Path.Combine(path, subitem));
    } else {
        // FIX: watch the newly created subdirectory itself.  The original
        // re-subscribed konq_cache_dir here, so new cache subdirectories
        // were never watched for finished files, contradicting the comment
        // above.
        Inotify.Subscribe(Path.Combine(path, subitem), OnInotifyEvent,
                          Inotify.EventType.CloseWrite);
    }
}
/**
 * Recursively traverse the files and dirctories under mail_root
 * to find files that need to be indexed, directories that
 * need to be watched for changes
 */
public void Crawl()
{
    // Nothing to do if the KMail mail root is not present.
    if (!Directory.Exists(mail_root)) {
        return;
    }

    // Rebuild the folder/file lists from scratch on every crawl.
    mail_directories.Clear();
    folder_directories.Clear();
    mbox_files.Clear();

    Queue pending = new Queue();
    pending.Enqueue(mail_root);
    folder_directories.Add(mail_root);

    // add inotify watch to root folder
    if (Inotify.Enabled) {
        Inotify.Subscribe(mail_root, OnInotifyEvent,
                          Inotify.EventType.Create | Inotify.EventType.Delete |
                          Inotify.EventType.MovedFrom | Inotify.EventType.MovedTo |
                          Inotify.EventType.Modify);
    }

    while (pending.Count > 0) {
        string dir = (string)pending.Dequeue();
        Logger.Log.Debug("Searching for mbox and maildirs in " + dir);
        foreach (FileInfo fi in DirectoryWalker.GetFileInfos(dir)) {
            // KMail keeps a ".<folder>.index" file for each mail folder;
            // those index files are used to discover the folders.
            if (!fi.Name.EndsWith(".index")) {
                continue;
            }
            string indexFile = fi.Name;
            // Strip the leading "." and the trailing ".index" to recover
            // the folder name.
            string mailFolderName = indexFile.Substring(1, indexFile.LastIndexOf(".index") - 1);
            string mailFolder = Path.Combine(dir, mailFolderName);
            if (IgnoreFolder(mailFolder)) {
                continue;
            }
            if (Directory.Exists(mailFolder)) {
                // A directory: maildir-style folder.
                mail_directories.Add(mailFolder);
                if (Inotify.Enabled) {
                    Watch(mailFolder);
                }
            } else if (File.Exists(mailFolder)) {
                // A plain file: mbox-style folder.
                mbox_files.Add(mailFolder);
            }
            // if there is a directory with name .<mailFolderName>.directory
            // then it contains sub-folders
            string subFolder = Path.Combine(dir, "." + mailFolderName + ".directory");
            if (Directory.Exists(subFolder)) {
                pending.Enqueue(subFolder);
                folder_directories.Add(subFolder);
                if (Inotify.Enabled) {
                    Inotify.Subscribe(subFolder, OnInotifyEvent,
                                      Inotify.EventType.Create | Inotify.EventType.Delete |
                                      Inotify.EventType.MovedFrom | Inotify.EventType.MovedTo |
                                      Inotify.EventType.Modify);
                }
            }
        }
    }

    // copy the contents as mail_directories, mbox_files might change due to async events
    ArrayList _mail_directories = new ArrayList(mail_directories);
    ArrayList _mbox_files = new ArrayList(mbox_files);

    if (queryable.ThisScheduler.ContainsByTag(mail_root)) {
        Logger.Log.Debug("Not adding task for already running task: {0}", mail_root);
        return;
    } else {
        KMaildirIndexableGenerator generator = new KMaildirIndexableGenerator(this, _mail_directories);
        AddIIndexableTask(generator, mail_root);
    }

    foreach (string mbox_file in _mbox_files) {
        IndexMbox(mbox_file, true);
    }
}
// Inotify callback: refresh the track list when the Rhythmbox database file
// changes, or the playlists when the playlist file changes, committing the
// daemon state either way.
private void OnRhythmboxChanged(Inotify.Watch watch, string path, string subitem, string srcpath, Inotify.EventType type)
{
    string changed = Path.Combine(path, subitem);

    if (changed == dbpath) {
        RefreshTracks();
        Daemon.Server.Commit();
    } else if (changed == plpath) {
        RefreshPlaylists();
        Daemon.Server.Commit();
    }
}
// Inotify callback: when the Banshee database file changes, record the
// change time and wake whoever is waiting on refreshLock.
private void OnBansheeChanged(Inotify.Watch watch, string path, string subitem, string srcpath, Inotify.EventType type)
{
    if (Path.Combine(path, subitem) != dbpath)
        return;

    lock (refreshLock) {
        lastChange = DateTime.Now;
        Monitor.Pulse(refreshLock);
    }
}
// Inotify callback for the Amarok data directory: refresh the track list
// when the database is rewritten, re-initialize playlists when the playlist
// directory is created, and in every case record the change time and wake
// the thread waiting on commitLock.
private void OnAmarokChanged(Inotify.Watch watch, string path, string subitem, string srcpath, Inotify.EventType type)
{
    string file = Path.Combine (path, subitem);

    if ((type & Inotify.EventType.CloseWrite) > 0 && file == dbpath) {
        try {
            RefreshTracks ();
        } catch (Exception) {
            // FIX: drop the unused exception variable (compiler warning).
            // sometimes we get some crappy random sqlite errors (race
            // somewhere?); eat them
        }
    } else if ((type & Inotify.EventType.Create) > 0 &&
               (type & Inotify.EventType.IsDirectory) > 0 && file == pldir) {
        InitPlaylists ();
    }

    lock (commitLock) {
        lastChange = DateTime.Now;
        Monitor.Pulse (commitLock);
    }
}
// Inotify callback for the playlists directory: keep the in-memory playlist
// set in sync with the on-disk m3u files, then record the change time and
// wake the thread waiting on commitLock.
private void OnPlaylistsChanged(Inotify.Watch watch, string path, string subitem, string srcpath, Inotify.EventType type)
{
    string file = Path.Combine (path, subitem);

    // NOTE(review): this matches any name ending in "m3u", not only the
    // ".m3u" extension (e.g. "foom3u" would also pass) — confirm intended.
    if (!file.EndsWith ("m3u"))
        return;

    if ((type & Inotify.EventType.CloseWrite) > 0) {
        // File finished being written: (re)load it.
        RefreshPlaylist (file);
    } else if ((type & Inotify.EventType.MovedTo) > 0) {
        // Renamed into place: drop the old path, load the new one.
        RemovePlaylist (srcpath);
        RefreshPlaylist (file);
    } else if (((type & Inotify.EventType.Delete) > 0 ||
                (type & Inotify.EventType.MovedFrom) > 0) && playlists.ContainsKey (file)) {
        // Deleted or renamed away: forget it, but only if we knew about it.
        RemovePlaylist (file);
    }

    lock (commitLock) {
        lastChange = DateTime.Now;
        Monitor.Pulse (commitLock);
    }
}
// Daemon start-up sequence: bring up the request listener (replacing a
// running instance when --replace was given), wire up out-of-process
// indexing, index synchronization, the query driver, battery monitoring,
// the global scheduler, inotify, zeroconf and configuration watching.
// Returns false.
public static bool StartupProcess()
{
    // Profile our initialization
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();

    // Fire up our server
    if (!StartServer()) {
        if (!arg_replace) {
            Logger.Log.Error("Could not set up the listener for beagrep requests. " +
                             "There is probably another beagrepd instance running. " +
                             "Use --replace to replace the running service");
            Environment.Exit(1);
        }
        ReplaceExisting();
    }

    // Set up out-of-process indexing
    LuceneQueryable.IndexerHook = new LuceneQueryable.IndexerCreator(RemoteIndexer.NewRemoteIndexer);

    Config config = Conf.Get(Conf.Names.DaemonConfig);

    // Initialize synchronization to keep the indexes local if PathFinder.StorageDir
    // is on a non-block device, or if BEAGREP_SYNCHRONIZE_LOCALLY is set
    if ((!SystemInformation.IsPathOnBlockDevice(PathFinder.StorageDir) &&
         config.GetOption(Conf.Names.IndexSynchronization, true)) ||
        Environment.GetEnvironmentVariable("BEAGREP_SYNCHRONIZE_LOCALLY") != null) {
        IndexSynchronization.Initialize();
    }

    // Start the query driver.
    Logger.Log.Debug("Starting QueryDriver");
    QueryDriver.Start();

    // Start our battery monitor so we can shut down the
    // scheduler if needed.
    BatteryMonitor.Init();

    bool initially_on_battery = !BatteryMonitor.UsingAC && !config.GetOption(Conf.Names.IndexOnBattery, false);

    // Start the Global Scheduler thread
    if (!arg_disable_scheduler) {
        if (!initially_on_battery) {
            Logger.Log.Debug("Starting Scheduler thread");
            Scheduler.Global.Start();
        } else {
            Log.Debug("Beagrep started on battery, not starting scheduler thread");
        }
    }

    // Start our Inotify threads
    Inotify.Start();

    // Test if the FileAdvise stuff is working: This will print a
    // warning if not. The actual advice calls will fail silently.
    FileAdvise.TestAdvise();

#if ENABLE_AVAHI
    zeroconf = new Beagrep.Daemon.Network.Zeroconf();
#endif

    Conf.WatchForUpdates();

    stopwatch.Stop();

    Logger.Log.Debug("Daemon initialization finished after {0}", stopwatch);
    SystemInformation.LogMemoryUsage();

    if (arg_indexing_test_mode) {
        Thread.Sleep(1000); // Ugly paranoia: wait a second for the backends to settle.
        Logger.Log.Debug("Running in indexing test mode");
        Scheduler.Global.EmptyQueueEvent += OnEmptySchedulerQueue;
        Scheduler.Global.Add(null); // pulse the scheduler
    }

    return(false);
}