// Blindly merge another indexable's state into this one.  No
// sanity checking is done to see whether the merge actually
// makes sense -- that is the caller's responsibility.
public void Merge(Indexable other)
{
	// Keep whichever timestamp is more recent.
	if (this.Timestamp < other.Timestamp)
		this.Timestamp = other.Timestamp;

	// Pull in all of the other indexable's properties.
	foreach (Property prop in other.Properties)
		AddProperty(prop);

	// The other indexable's local state overwrites ours on
	// key collisions.
	foreach (DictionaryEntry entry in other.local_state)
		local_state [entry.Key] = entry.Value;
}
// Copy every property of a top-level parent indexable onto this
// child, rewriting keys with a "parent:" prefix so that child
// hits can be matched against parent metadata.
private void CopyPropertyParentToChild(Indexable parent)
{
	foreach (Property parent_prop in parent.Properties) {
		Property child_prop = (Property)parent_prop.Clone();

		// Prefix the key with "parent:" ONLY IF it is not a
		// private property (those are not properties of the
		// file content) and the prefix is not already present.
		bool is_private  = child_prop.Key.StartsWith(Property.PrivateNamespace);
		bool has_prefix  = child_prop.Key.StartsWith("parent:");

		if (!is_private && !has_prefix)
			child_prop.Key = "parent:" + child_prop.Key;

		AddProperty(child_prop);
	}
}
// FIXME: Copying the correct properties from parent to child:
// (This is not perfect yet.)
// It does not make sense to have parent:parent:...:parent:foo
// property names on a deeply nested child.
// Moreover, if indexable a.mbox has child b.zip which has child c.zip,
// then upon matching c.zip we only want the information from a.mbox
// (the toplevel indexable).  Intermediate parent information is not
// needed for displaying results; storing it would in fact cause
// confusion during display.
// E.g. storing parent:beagle:filename for every ancestor would yield
// parent:beagle:filename=a.mbox AND parent:beagle:filename=b.zip,
// whereas only the toplevel parent:beagle:filename=a.mbox is wanted.
// Indexables that need intermediate/immediate parent info must store
// it separately and explicitly.
// Another problem: a toplevel indexable might store information that
// should not be matched when searching for its children.  Copying
// those properties into every child would match them incorrectly.
//
private void CopyPropertyChildToChild(Indexable parent)
{
	// The parent is itself a child: its "parent:" and private
	// namespace properties are inherited as-is; every other
	// property is still copied but flagged as not stored.
	foreach (Property prop in parent.Properties) {
		Property new_prop = (Property)prop.Clone();

		bool inheritable =
			prop.Key.StartsWith("parent:") ||
			prop.Key.StartsWith(Property.PrivateNamespace);

		if (!inheritable)
			new_prop.IsStored = false;

		AddProperty(new_prop);
	}
}
/////////////////////////////////////////////////////////////////////////

// Decide whether an indexable should be run through a filter at
// all.  Explicit directives (Never / no content / Always) win;
// otherwise we default to filtering non-transient file indexables
// and indexables with a specific mime type attached.
static private bool ShouldWeFilterThis(Indexable indexable)
{
	if (indexable.NoContent || indexable.Filtering == IndexableFiltering.Never)
		return false;

	if (indexable.Filtering == IndexableFiltering.Always)
		return true;

	return indexable.IsNonTransient || indexable.MimeType != null;
}
//////////////////////////

// Mark this indexable as a child of 'parent', inheriting its
// ancestry, timestamp, hit type, source and (for now) its
// searchable properties.
public void SetChildOf(Indexable parent)
{
	this.IsChild = true;

	// Children always point at the toplevel ancestor, never at
	// an intermediate parent.
	this.ParentUri = parent.IsChild ? parent.ParentUri : parent.Uri;

	// Inherit whatever the child does not already have.
	if (!this.ValidTimestamp)
		this.Timestamp = parent.Timestamp;

	if (string.IsNullOrEmpty(this.HitType))
		this.HitType = parent.HitType;

	this.Source = parent.Source;

	// FIXME: Set all of the parent's properties on the child so
	// that we get matches against the child that would otherwise
	// match only the parent, at least until we have proper RDF
	// support.
	if (parent.IsChild)
		CopyPropertyChildToChild(parent);
	else
		CopyPropertyParentToChild(parent);
}
// Convenience overload: filter with no text cache, discarding the
// filter instance that was used.
static public bool FilterIndexable(Indexable indexable)
{
	Filter unused;
	return FilterIndexable(indexable, null, out unused);
}
// Convenience overload: filter with no text cache, but report the
// filter instance that was used.
static public bool FilterIndexable(Indexable indexable, out Filter filter)
{
	return FilterIndexable(indexable, null, out filter);
}
// Attempt to filter the indexable: pick candidate filters (by
// explicit mime type, or by sniffing the content file), open the
// first one that succeeds, and hook its text readers up to the
// indexable.
//
// Returns true with 'filter' set to the successful filter, or
// false with 'filter' left null when the indexable should not or
// could not be filtered.  (AlreadyFiltered indexables return true
// with a null filter.)
static public bool FilterIndexable(Indexable indexable, TextCache text_cache, out Filter filter)
{
	filter = null;
	ICollection filters = null;

	if (indexable.Filtering == IndexableFiltering.AlreadyFiltered)
		return true;

	if (!ShouldWeFilterThis(indexable))
		return false;

	string path = null;

	// First, figure out which filter we should use to deal with
	// the indexable.

	// If a specific mime type is specified, try to index as that type.
	if (indexable.MimeType != null)
		filters = CreateFiltersFromMimeType(indexable.MimeType);

	if (indexable.ContentUri.IsFile) {
		path = indexable.ContentUri.LocalPath;

		// Otherwise, set the mime type for a directory,
		// or sniff it from the file.
		if (indexable.MimeType == null) {
			if (Directory.Exists(path)) {
				indexable.MimeType = "inode/directory";
				indexable.NoContent = true;
			} else if (File.Exists(path)) {
				indexable.MimeType = null; // XdgMime.GetMimeType(path);
			} else {
				// The file vanished out from under us.
				//Log.Warn ("Unable to filter {0}. {1} not found.", indexable.DisplayUri, path);
				return false;
			}
		}

		// Set the timestamp to the last write time, if it isn't
		// set by the backend.
		if (!indexable.ValidTimestamp && indexable.IsNonTransient)
			indexable.Timestamp = System.IO.File.GetLastWriteTimeUtc(path);

		if (filters == null || filters.Count == 0)
			filters = CreateFiltersFromIndexable(indexable);
	}

	// We don't know how to filter this, so there is nothing else to do.
	// BUGFIX: guard against 'filters' still being null here -- this is
	// reachable when the mime type is unset and the content uri is not
	// a file (e.g. a non-transient indexable with stream content), and
	// previously threw a NullReferenceException on the Count access.
	if (filters == null || filters.Count == 0) {
		//if (! indexable.NoContent)
		//	Logger.Log.Debug ("No filter for {0} ({1}) [{2}]", indexable.DisplayUri, path, indexable.MimeType);
		return false;
	}

	foreach (Filter candidate_filter in filters) {
		// Hook up the snippet writer.  (Disabled for now.)
		if (candidate_filter.SnippetMode && text_cache != null) {
			/*
			 * if (candidate_filter.OriginalIsText && indexable.IsNonTransient) {
			 *	text_cache.MarkAsSelfCached (indexable.Uri);
			 * } else if (indexable.CacheContent) {
			 *	TextWriter writer = text_cache.GetWriter (indexable.Uri);
			 *	candidate_filter.AttachSnippetWriter (writer);
			 * }
			 */
		}

		// Set the indexable on the filter.
		candidate_filter.Indexable = indexable;

		// Open the filter, copy the file's properties to the indexable,
		// and hook up the TextReaders.  Prefer the file path, then a
		// text reader, then a raw binary stream.
		bool successful_open = false;
		TextReader text_reader;
		Stream binary_stream;

		if (path != null)
			successful_open = candidate_filter.Open(path);
		else if ((text_reader = indexable.GetTextReader()) != null)
			successful_open = candidate_filter.Open(text_reader);
		else if ((binary_stream = indexable.GetBinaryStream()) != null)
			successful_open = candidate_filter.Open(binary_stream);

		if (successful_open) {
			// Record the file type the filter determined.
			indexable.AddProperty(Property.NewKeyword("beagle:FileType", candidate_filter.FileType));

			indexable.SetTextReader(candidate_filter.GetTextReader());
			indexable.SetHotTextReader(candidate_filter.GetHotTextReader());
#if ENABLE_RDF_ADAPTER
			indexable.Links = candidate_filter.Links;
#endif
			filter = candidate_filter;
			return true;
		}

		// This filter failed; release its resources and fall back
		// to the next candidate.
		//Log.Warn ("Error in filtering {0} with {1}, falling back", path, candidate_filter);
		candidate_filter.Cleanup();
	}

	// None of the matching filters could process the indexable.
	return false;
}
// Queue a child indexable generated while processing this one.
protected void AddIndexable(Indexable indexable)
{
	generated_indexables.Add(indexable);
}
// IComparable implementation: order indexables chronologically
// by their timestamps.
public int CompareTo(object obj)
{
	Indexable other = (Indexable)obj;
	return this.Timestamp.CompareTo(other.Timestamp);
}