/// <summary>
/// Removes every triple belonging to the given page from the subject index,
/// pruning predicate maps and subject entries that become empty.
/// </summary>
public void RemovePage(JsonLdPage page)
{
    lock (this)
    {
        // snapshot the keys so entries can be removed while iterating
        foreach (var subject in _subjectIndex.Keys.ToArray())
        {
            var predicateMap = _subjectIndex[subject];

            foreach (var predicate in predicateMap.Keys.ToArray())
            {
                var triples = predicateMap[predicate];
                triples.RemoveAll(t => t.Page.Equals(page));

                // drop the predicate entry once no triples remain
                if (triples.Count == 0)
                {
                    predicateMap.Remove(predicate);
                }
            }

            // drop the subject entry once no predicates remain
            if (predicateMap.Count == 0)
            {
                _subjectIndex.Remove(subject);
            }
        }
    }
}
/// <summary>
/// True if the page has been added to the cache.
/// </summary>
public bool HasPageOfEntity(Uri entity)
{
    Uri pageUri = Utility.GetUriWithoutHash(entity);

    JsonLdPage page;
    if (!_pages.TryGetValue(pageUri, out page))
    {
        return false;
    }

    // mark the page as accessed so it stays around
    page.UpdateLastUsed();

    // to be extra safe, a disposed page does not count as cached
    return !page.IsDisposed;
}
/// <summary>
/// Returns the JObject of the page if we have it.
/// </summary>
private JObject GetEntityFromPage(Uri entity)
{
    // only root uris map directly to a cached page
    if (!Utility.IsRootUri(entity))
    {
        return null;
    }

    JsonLdPage page;
    if (!_pages.TryGetValue(entity, out page))
    {
        return null;
    }

    page.UpdateLastUsed();
    return page.Compacted;
}
/// <summary>
/// A minimally blocking call that starts a background task to update the graph.
/// </summary>
/// <param name="compacted">Compacted json for the page.</param>
/// <param name="pageUri">Full uri of the page; must not contain a fragment.</param>
public void Add(JObject compacted, Uri pageUri)
{
    // char overload is an ordinal search; the string overload is culture-sensitive (CA1307)
    Debug.Assert(pageUri.AbsoluteUri.IndexOf('#') == -1, "Add should be on the full Uri and not the child Uri!");

    JsonLdPage page = new JsonLdPage(pageUri, compacted);

    if (_pages.TryAdd(pageUri, page))
    {
        // start the graph load
        page.BeginLoad(AddCallback);
    }
    else
    {
        // another thread already added this uri - clean up our duplicate
        page.Dispose();
    }
}
/// <summary>
/// Creates a triple that remembers the page and compacted json node it came from.
/// </summary>
public JsonLdTriple(JsonLdPage page, JObject jsonNode, Node subNode, Node predNode, Node objNode)
    : base(subNode, predNode, objNode)
{
    _jsonPage = page;
    _jsonNode = jsonNode;
}
/// <summary>
/// Builds a JsonLdTriple from the given nodes and asserts it into the graph.
/// </summary>
public void Assert(JsonLdPage page, JObject jsonNode, Node subNode, Node predNode, Node objNode)
{
    var triple = new JsonLdTriple(page, jsonNode, subNode, predNode, objNode);
    Assert(triple);
}
/// <summary>
/// Removes all pages not used within the given time span.
/// Rebuilds the master graph from the surviving pages, then removes and
/// disposes the expired ones. No-op while any page is still loading.
/// </summary>
/// <param name="keepPagesUsedWithin">Pages last used within this span are kept.</param>
private void CleanUp(TimeSpan keepPagesUsedWithin)
{
    // pages last used before this instant are eligible for removal
    DateTime cutOff = DateTime.UtcNow.Subtract(keepPagesUsedWithin);

    // lock to keep any new pages from being added during this
    lock (this)
    {
        // just in case we show really late
        if (_disposed)
        {
            return;
        }

        // create a working set of pages that can be considered locked
        JsonLdPage[] pages = _pages.Values.ToArray();

        // if pages are still loading we should skip the clean up
        // TODO: post-preview this should force a clean up if the graph is huge
        if (pages.All(p => p.IsLoaded))
        {
            // check if a clean up is needed
            if (pages.Any(p => !p.UsedAfter(cutOff)))
            {
                List <JsonLdPage> keep = new List <JsonLdPage>(pages.Length);
                List <JsonLdPage> remove = new List <JsonLdPage>(pages.Length);

                // pages could potentially change last accessed times, so make the decisions in one shot
                foreach (var page in pages)
                {
                    if (page.UsedAfter(cutOff))
                    {
                        keep.Add(page);
                    }
                    else
                    {
                        remove.Add(page);
                    }
                }

                // second check to make sure we need to do this
                if (remove.Count > 0)
                {
                    DataTraceSources.Verbose("[EntityCache] EntityCache rebuild started.");

                    // rebuild the master graph from only the pages we are keeping
                    JsonLdGraph graph = new JsonLdGraph();

                    // graph merge
                    foreach (var page in keep)
                    {
                        graph.Merge(page.Graph);
                    }

                    _masterGraph = graph;
                    DataTraceSources.Verbose("[EntityCache] EntityCache rebuild complete.");

                    // remove and dispose of the old pages
                    foreach (var page in remove)
                    {
                        JsonLdPage removedPage = null;
                        if (_pages.TryRemove(page.Uri, out removedPage))
                        {
                            Debug.Assert(!removedPage.UsedAfter(cutOff), "Someone used a page that was scheduled to be removed. This should have been locked.");
                            removedPage.Dispose();
                        }
                        else
                        {
                            Debug.Fail(page.Uri.AbsoluteUri + " disappeared from the page cache.");
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Called by JsonLdPage after the graph has loaded.
/// </summary>
private void AddCallback(JsonLdPage page)
{
    // touch the page so the next clean up does not evict it immediately
    page.UpdateLastUsed();

    // fold the freshly loaded triples into the master graph
    MergeGraph(page.Graph);
}
/// <summary>
/// Load a compacted json object into a JsonLdGraph.
/// </summary>
/// <param name="compacted">Compacted json for the page.</param>
/// <param name="page">The page the json belongs to.</param>
/// <returns>A graph whose triples link back to their source json nodes.</returns>
public static JsonLdGraph Load(JObject compacted, JsonLdPage page)
{
    Dictionary<int, JObject> nodes = new Dictionary<int, JObject>();
    int marker = 0;

    // Mark each node with a serial number
    Action<JObject> addSerial = (node) =>
    {
        if (!Utility.IsInContext(node))
        {
            int serial = marker++;
            node[Constants.CacheNode] = serial;
            nodes.Add(serial, node);
        }
    };

    // add serials
    Utility.JsonEntityVisitor(compacted, addSerial);

    // create graph without JTokens
    var basicGraph = Utility.GetGraphFromCompacted(compacted);

    // split out the cache triples
    List<Triple> normalTriples = new List<Triple>();
    Dictionary<string, JObject> cacheTriples = new Dictionary<string, JObject>();

    foreach (var triple in basicGraph.Triples)
    {
        // cache node predicates represent the mapping between the subject and token serial
        if (triple.Predicate.IsValue(Constants.CacheNode))
        {
            string subject = triple.Subject.GetValue();

            int serial;
            if (!Int32.TryParse(triple.Object.GetValue(), out serial))
            {
                // previously an unparsable serial fell through as 0 and attached
                // nodes[0] to the wrong subject - skip the bad mapping instead
                Debug.Fail("Invalid cache node serial: " + triple.Object.GetValue());
                continue;
            }

            // Remove the serial we added
            JObject jObject = nodes[serial];
            jObject.Remove(Constants.CacheNode);

            // there should not be any duplicates here
            cacheTriples.Add(subject, jObject);
        }
        else
        {
            // store this to go into the graph
            normalTriples.Add(triple);
        }
    }

    // create the real graph
    JsonLdGraph jsonGraph = new JsonLdGraph();

    // merge the graph data with the compacted json tokens
    foreach (var triple in normalTriples)
    {
        string subject = triple.Subject.GetValue();

        // jObject stays null for subjects with no cached json node
        JObject jObject = null;
        cacheTriples.TryGetValue(subject, out jObject);

        var jsonTriple = new JsonLdTriple(page, jObject, triple.Subject, triple.Predicate, triple.Object);
        jsonGraph.Assert(jsonTriple);
    }

    return jsonGraph;
}
/// <summary>
/// Load a compacted json object into a JsonLdGraph on a background thread.
/// </summary>
public static async Task<JsonLdGraph> LoadAsync(JObject compacted, JsonLdPage page)
{
    var loadTask = Task.Run(() => Load(compacted, page));
    return await loadTask;
}