public void Add (Indexable indexable, Scheduler.Priority priority)
		{
			lock (indexables) {
				indexables.Enqueue (indexable);

				// Track the highest priority seen among the queued indexables
				if (priority > highest_prio)
					highest_prio = priority;

				// Create the add task lazily on first use; afterwards only
				// its priority needs to be bumped
				if (self_task == null) {
					self_task = queryable.NewAddTask (this);
					self_task.Priority = highest_prio;
					queryable.ThisScheduler.Add (self_task);
				} else {
					self_task.Priority = highest_prio;
				}
			}
		}
		private void CrawlHook (Scheduler.Task task)
		{
			Crawl ();
			task.Reschedule = true;
			task.TriggerTime = DateTime.Now.AddHours (this.polling_interval_in_hours);
		}
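The CrawlHook above only reschedules an already-running task; a backend typically registers the hook once at start-up, roughly as in the sketch below. Scheduler.TaskFromHook, the Scheduler.TaskHook delegate, the Priority.Delayed value and the tag string are assumptions based on the Beagle scheduler API, not taken from the snippets here.
		// Minimal sketch: register the periodic crawl with the scheduler.
		private void StartCrawl ()
		{
			// TaskFromHook/TaskHook and Priority.Delayed are assumed here
			Scheduler.Task task = Scheduler.TaskFromHook (new Scheduler.TaskHook (CrawlHook));
			task.Tag = "Crawling notes";	// hypothetical tag
			task.Priority = Scheduler.Priority.Delayed;
			ThisScheduler.Add (task);	// CrawlHook reschedules itself from now on
		}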
		private void IndexNote (FileInfo file, Scheduler.Priority priority)
		{
			if (this.IsUpToDate (file.FullName))
				return;

			// Try to parse a Note from the given path
			Note note = TomboyNote.ParseNote (file);
			if (note == null)
				return;
			
			// A Note was returned; add it to the index
			Indexable indexable = NoteToIndexable (file, note);
			
			Scheduler.Task task = NewAddTask (indexable);
			task.Priority = priority;
			task.SubPriority = 0;
			ThisScheduler.Add (task);
		}
		static private void ShutdownHook (Scheduler.Task task)
		{
			try {
				Synchronize (SynchronizationTarget.Remote);

				// FIXME: This may not be safe to do here
				Logger.Log.Debug ("Purging locally synchronized indexes");
				Directory.Delete (local_index_dir, true);
			} catch (Exception ex) {
				Logger.Log.Error (ex, "Caught exception while doing shutdown synchronization");
			}
		}
		////////////////////////////////////////////////////////////////

		static private void SynchronizeHook (Scheduler.Task task)
		{
			try {
				Synchronize (SynchronizationTarget.Remote);
			} catch (Exception ex) {
				Logger.Log.Error (ex, "Caught exception while synchronizing");
			}

			task.Reschedule = true;
			task.TriggerTime = DateTime.Now.AddMinutes (sync_interval_in_minutes);
		}
		private void IndexLog (string filename, Scheduler.Priority priority)
		{
			if (! File.Exists (filename))
				return;

			if (IsUpToDate (filename))
				return;

			Indexable indexable = ImLogToIndexable (filename);
			Scheduler.Task task = NewAddTask (indexable);
			task.Priority = priority;
			task.SubPriority = 0;
			ThisScheduler.Add (task);
		}
		private void CrawlHook (Scheduler.Task task)
		{
			Crawl ();
			task.Reschedule = true;
			task.TriggerTime = DateTime.Now.AddSeconds (polling_interval_in_seconds);
		}
Example #8
			public void Schedule (Scheduler scheduler)
			{
				if (this.cancelled)
					return; // do not schedule a cancelled task

				// Increment the task groups the first
				// time a task is scheduled.
				if (this.scheduler == null)
					IncrementAllTaskGroups ();
				this.timestamp = DateTime.Now;
				this.scheduler = scheduler;
				this.cancelled = false;
			}
Example #9
			///////////////////////////////
			
			public void DoTask ()
			{
				if (! cancelled) {
					if (Debug)
						Logger.Log.Debug ("Starting task {0}", Tag);
					child_task_group = null;
					Reschedule = false;
					TouchAllTaskGroups ();

					Stopwatch sw = new Stopwatch ();
					sw.Start ();
						
					try {
						DoTaskReal ();
					} catch (Exception ex) {
						misfires ++;
						Logger.Log.Warn (ex,
								 "Caught exception in DoTaskReal\n" +
								 "        Tag: {0}\n" +
								 "    Creator: {1}\n" +
								 "Description: {2}\n" +
								 "   Priority: {3} ({4})", 
								 Tag, Creator, Description, Priority, SubPriority);
						if (misfires >= MAX_TASK_EXCEPTION) {
							Log.Warn ("More than {5} exceptions in DoTaskReal. Disabling further execution of task:\n" +
								 "        Tag: {0}\n" +
								 "    Creator: {1}\n" +
								 "Description: {2}\n" +
								 "   Priority: {3} ({4})", 
								 Tag, Creator, Description, Priority, SubPriority, MAX_TASK_EXCEPTION);
							Cancel ("Exceptions in DoTaskReal");
						}
					}
					sw.Stop ();
					if (Debug)
						Logger.Log.Debug ("Finished task {0} in {1}", Tag, sw);

					if (cancelled) {
						return;
					} else if (Reschedule) {
						++count;
						if (Debug)
							Log.Debug ("Rescheduling task {0}", Tag);
						scheduler.Add (this); // re-add ourselves
					} else {
						DecrementAllTaskGroups ();
						scheduler = null;
					}
				}
			}
		private void ScheduleRemoval (Property prop, Scheduler.Priority priority)
		{
			if (queryable.ThisScheduler.ContainsByTag (prop.ToString ())) {
				Logger.Log.Debug ("Not adding a Task for already running: {0}", prop.ToString ());
				return;
			}
			
			Scheduler.Task task = queryable.NewRemoveByPropertyTask (prop);
			task.Priority = priority;
			task.SubPriority = 0;
			queryable.ThisScheduler.Add (task);
		}
		public void ScheduleRemoval (Uri[] uris, string tag, Scheduler.Priority priority)
		{
			if (queryable.ThisScheduler.ContainsByTag (tag)) {
				Logger.Log.Debug ("Not adding a Task for already running: {0}", tag);
				return;
			}

			Scheduler.Task task = queryable.NewAddTask (new UriRemovalIndexableGenerator (uris));
			task.Priority = priority;
			task.SubPriority = 0;
			queryable.ThisScheduler.Add (task);
		}
		/////////////////////////////////////////////

		int IndexArchive (FileInfo file, Scheduler.Priority priority)
		{
			if (this.FileAttributesStore.IsUpToDate (file.FullName))
				return -1;

			log.Debug ("Scanning Monodoc source file " + file);

			Scheduler.TaskGroup group = NewMarkingTaskGroup (file.FullName, file.LastWriteTime);
			
			int countTypes = 0;			
			ZipFile archive = new ZipFile (file.ToString());
			
			foreach (ZipEntry entry in archive)
			{
				// Skip entries whose names contain a dot; only the
				// extensionless type entries are parsed below
				if (entry.Name.IndexOf (".") != -1)
					continue;

				XmlDocument document = new XmlDocument ();
				document.Load (archive.GetInputStream (entry));
			
				XmlNode type = document.SelectSingleNode ("/Type");

				if (type == null)
					continue;

				Indexable typeIndexable = TypeNodeToIndexable (type, file);
				
				Scheduler.Task typeTask = NewAddTask (typeIndexable);
				typeTask.Priority = priority;
				typeTask.SubPriority = 0;
				typeTask.AddTaskGroup (group);
				ThisScheduler.Add (typeTask);

				foreach (XmlNode member in type.SelectNodes ("Members/Member"))
				{
					Indexable memberIndexable = MemberNodeToIndexable (
						member,
						file,
						type.Attributes["FullName"].Value);

					Scheduler.Task memberTask = NewAddTask (memberIndexable);
					memberTask.Priority = priority;
					memberTask.SubPriority = 0;
					memberTask.AddTaskGroup (group);
					ThisScheduler.Add (memberTask);
				}
				countTypes++;
			}

			return countTypes;
		}
		public void ScheduleIndexable (Indexable indexable, Scheduler.Priority priority)
		{
			generator.Add (indexable, priority);
		}
Example #14
		// Ouch! What a name ?!
		private void ContinueIndexerIndexableIndexing (Scheduler.Task task)
		{
			Flush (true);
		}
		private void IndexLauncher (FileInfo file, Scheduler.Priority priority)
		{
			if ((! file.Exists)
			    || (this.FileAttributesStore.IsUpToDate (file.FullName)))
				return;
			
			/* Check to see if file is a launcher */
			if (Beagle.Util.VFS.Mime.GetMimeType (file.FullName) != "application/x-desktop")
				return;

			StreamReader reader;

			try {
				reader = new StreamReader (file.Open (FileMode.Open, FileAccess.Read, FileShare.Read));
			} catch (Exception e) {
				log.Warn ("Could not open '{0}': {1}", file.FullName, e.Message);
				return;
			}

			if (reader.ReadLine () != "[Desktop Entry]") {
				reader.Close ();
				return;
			}

			/* I'm convinced it is a launcher */
			Indexable indexable = new Indexable (UriFu.PathToFileUri (file.FullName));

			indexable.Timestamp = file.LastWriteTime;
			indexable.Type = "Launcher";
			indexable.MimeType = "application/x-desktop";
			
			// desktop files must have a name
			bool have_name = false;

			String line;
			while ((line = reader.ReadLine ()) != null)  {
				string [] sline = line.Split ('=');
				if (sline.Length != 2)
					continue;

				// FIXME: We shouldn't really search fields that are in locales other than the current one, should we?

				if (sline [0].Equals ("Icon") || sline [0].Equals ("Exec")) {
					indexable.AddProperty (Beagle.Property.NewUnsearched ("fixme:" + sline[0], sline[1]));
				} else if (sline [0].StartsWith ("Name")) {
					if (sline [0] == "Name")
						have_name = true;
					indexable.AddProperty (Beagle.Property.NewUnsearched ("fixme:" + sline[0], sline[1]));
				} else if (sline[0].StartsWith ("Comment")) {
					indexable.AddProperty (Beagle.Property.New ("fixme:" + sline[0], sline[1]));
				}
			}
			reader.Close ();
			
			if (have_name) {
				Scheduler.Task task = NewAddTask (indexable);
				task.Priority = priority;
				ThisScheduler.Add (task);
			}
		}
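For reference, IndexLauncher above expects a standard freedesktop.org desktop entry whose first line is exactly "[Desktop Entry]"; the key/value pairs below are purely illustrative:
		[Desktop Entry]
		Name=Example Browser
		Comment=Browse the Web
		Exec=example-browser %u
		Icon=example-browser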
Example #16
			///////////////////////////////

			// Cleanup is called whenever we know that a task will never
			// be executed.  It is never called on tasks for which DoTaskReal
			// has been called (except when rescheduled).  Cleanup is also
			// called when a task is cancelled.

			public void Cleanup ()
			{
				try {
					DoCleanup ();
				} catch (Exception ex) {
					Logger.Log.Warn (ex, "Caught exception cleaning up task '{0}'", Tag);
				} finally {
					Reschedule = false;
					scheduler = null;
				}
			}
Example #17
		/**
		 * For the non-inotify case, this method is invoked repeatedly.
		 */
		private void CrawlHook (Scheduler.Task task)
		{
			if (local_indexer != null)
				local_indexer.Crawl ();
			if (dimap_indexer != null)
				dimap_indexer.Crawl ();
			task.Reschedule = true;
			task.TriggerTime = DateTime.Now.AddSeconds (polling_interval_in_seconds);
		}
Example #18
		private void CrawlHook (Scheduler.Task task)
		{
			if (!ThisScheduler.ContainsByTag (crawler_tag)) {
				Crawl ();
			}

			task.Reschedule = true;
			task.TriggerTime = DateTime.Now.AddSeconds (polling_interval_in_seconds);
		}