// Coroutine (MindTouch Dream-style: Yield/Result<T>) that returns PageChangeCacheData for a page,
// serving from _cache when possible. Visible flow:
//   1. Build cache key "{pageId}:{wikiId}:{yyyyMMddHHmm}:{culture}" and probe _cache under lock; hit => return cached value.
//   2. Fetch page info via _deki.At("pages", id) with header "X-Deki-Site: id=<wikiId>"; on failure log and return null.
//   3. Extract title / uri.ui; build unsubscribe URI at index.php?title=Special:PageAlerts&id=<pageId>.
//   4. Fetch the change feed since (time - 10s, format yyyyMMddHHmmss); on failure or empty "change" list log and return null.
//   5. Build cacheData items, then insert into _cache under lock, registering _cacheItemCallback at most once
//      per key so only one expiration timer is created (the existence check below exists for that reason).
//
// NOTE(review): this source is corrupted and will not compile as-is:
//   - The method body has been collapsed onto a single physical line, so the inline "// Note (arnec): ..."
//     comment now comments out all code that follows it on that line.
//   - Several spans of code were replaced by "******" (apparently an automated secret-redaction pass that
//     matched the  "User:" + ...  string concatenations). The code between
//     `EncodeSegment("User:"` and `"rc_comment"]` / `"rc_timestamp"]` — user-page URI construction, the
//     `change` loop header, and the item construction — is missing and cannot be reconstructed from here.
//   Restore this method from version control before making functional changes; code below is left byte-identical.
//
// NOTE(review): a second, near-identical definition of GetCache appears later in this file (duplicate
// paste/merge artifact?) — presumably only one should survive; verify against the build.
private Yield GetCache(uint pageId, string wikiId, DateTime time, CultureInfo culture, Result <PageChangeCacheData> result) { // Note (arnec): going back 10 seconds before event, because timestamps in a request are not currently synced string keytime = time.ToString("yyyyMMddHHmm"); string since = time.Subtract(TimeSpan.FromSeconds(10)).ToString("yyyyMMddHHmmss"); PageChangeCacheData cacheData; string key = string.Format("{0}:{1}:{2}:{3}", pageId, wikiId, keytime, culture); _log.DebugFormat("getting data for key: {0}", key); lock (_cache) { if (_cache.TryGetValue(key, out cacheData)) { result.Return(cacheData); yield break; } } // fetch the page data Result <DreamMessage> pageResponse; yield return(pageResponse = _deki .At("pages", pageId.ToString()) .WithHeader("X-Deki-Site", "id=" + wikiId) .With("redirects", "0").GetAsync()); if (!pageResponse.Value.IsSuccessful) { _log.WarnFormat("Unable to fetch page '{0}' info: {1}", pageId, pageResponse.Value.Status); result.Return((PageChangeCacheData)null); yield break; } XDoc page = pageResponse.Value.ToDocument(); string title = page["title"].AsText; XUri pageUri = page["uri.ui"].AsUri; string pageUriString = CleanUriForEmail(pageUri); string unsubUri = CleanUriForEmail(pageUri .WithoutPathQueryFragment() .At("index.php") .With("title", "Special:PageAlerts") .With("id", pageId.ToString())); // fetch the revision history Result <DreamMessage> feedResponse; yield return(feedResponse = _deki .At("pages", pageId.ToString(), "feed") .WithHeader("X-Deki-Site", "id=" + wikiId) .With("redirects", "0") .With("format", "raw") .With("since", since) .GetAsync()); if (!feedResponse.Value.IsSuccessful) { _log.WarnFormat("Unable to fetch page '{0}' changes: {1}", pageId, feedResponse.Value.Status); result.Return((PageChangeCacheData)null); yield break; } // build the docs XDoc feed = feedResponse.Value.ToDocument()["change"]; if (feed.ListLength == 0) { _log.WarnFormat("Change feed is empty for page: {0}", pageId); 
result.Return((PageChangeCacheData)null); yield break; } string who = feed["rc_user_name"].AsText; string whoUri = CleanUriForEmail(pageUri.WithoutPathQueryFragment().At(XUri.EncodeSegment("User:"******"rc_comment"].AsText; string revisionUri = CleanUriForEmail(pageUri.With("revision", change["rc_revision"].AsText)); who = change["rc_user_name"].AsText; whoUri = CleanUriForEmail(pageUri.WithoutPathQueryFragment().At(XUri.EncodeSegment("User:"******"rc_timestamp"].AsText); cacheData.Items.Add(item); } lock (_cache) { // even though we override the entry if one was created in the meantime // we do the existence check so that we don't set up two expiration timers; if (!_cache.ContainsKey(key)) { _cacheItemCallback(key, () => { lock (_cache) { _cache.Remove(key); } }); } _cache[key] = cacheData; } result.Return(cacheData); yield break; }
// Second, near-duplicate definition of GetCache (differs from the earlier copy only in whitespace/paren
// style: `Result<T>` vs `Result <T>`, `lock(` vs `lock (`, `yield return expr` vs `yield return(expr)`).
// NOTE(review): two definitions of the same method cannot coexist in one class — this looks like a
// paste/merge artifact; verify against version control which copy is canonical and delete the other.
//
// Visible behavior (same as the other copy): probe _cache under lock by key
// "{pageId}:{wikiId}:{yyyyMMddHHmm}:{culture}"; on miss fetch page info and the change feed since
// (time - 10s) from _deki with header "X-Deki-Site: id=<wikiId>"; return null (after a warning log) on any
// fetch failure or an empty "change" list; otherwise build cacheData, store it under lock — registering
// the _cacheItemCallback expiration hook only when the key is absent so two timers are never created —
// and return it.
//
// NOTE(review): this copy carries the same corruption as the other:
//   - The body is collapsed onto one physical line, so the inline "// Note (arnec): ..." comment
//     comments out the rest of that line.
//   - "******" redaction (apparently triggered by the  "User:" + ...  concatenations) has destroyed the
//     code between `EncodeSegment("User:"` and `"rc_comment"]` / `"rc_timestamp"]`, including the
//     `change` loop header and item construction. Not reconstructible from this file; restore from
//     version control. Code below is left byte-identical.
private Yield GetCache(uint pageId, string wikiId, DateTime time, CultureInfo culture, Result<PageChangeCacheData> result) { // Note (arnec): going back 10 seconds before event, because timestamps in a request are not currently synced string keytime = time.ToString("yyyyMMddHHmm"); string since = time.Subtract(TimeSpan.FromSeconds(10)).ToString("yyyyMMddHHmmss"); PageChangeCacheData cacheData; string key = string.Format("{0}:{1}:{2}:{3}", pageId, wikiId, keytime, culture); _log.DebugFormat("getting data for key: {0}", key); lock(_cache) { if(_cache.TryGetValue(key, out cacheData)) { result.Return(cacheData); yield break; } } // fetch the page data Result<DreamMessage> pageResponse; yield return pageResponse = _deki .At("pages", pageId.ToString()) .WithHeader("X-Deki-Site", "id=" + wikiId) .With("redirects", "0").GetAsync(); if(!pageResponse.Value.IsSuccessful) { _log.WarnFormat("Unable to fetch page '{0}' info: {1}", pageId, pageResponse.Value.Status); result.Return((PageChangeCacheData)null); yield break; } XDoc page = pageResponse.Value.ToDocument(); string title = page["title"].AsText; XUri pageUri = page["uri.ui"].AsUri; string pageUriString = CleanUriForEmail(pageUri); string unsubUri = CleanUriForEmail(pageUri .WithoutPathQueryFragment() .At("index.php") .With("title", "Special:PageAlerts") .With("id", pageId.ToString())); // fetch the revision history Result<DreamMessage> feedResponse; yield return feedResponse = _deki .At("pages", pageId.ToString(), "feed") .WithHeader("X-Deki-Site", "id=" + wikiId) .With("redirects", "0") .With("format", "raw") .With("since", since) .GetAsync(); if(!feedResponse.Value.IsSuccessful) { _log.WarnFormat("Unable to fetch page '{0}' changes: {1}", pageId, feedResponse.Value.Status); result.Return((PageChangeCacheData)null); yield break; } // build the docs XDoc feed = feedResponse.Value.ToDocument()["change"]; if(feed.ListLength == 0) { _log.WarnFormat("Change feed is empty for page: {0}", pageId); 
result.Return((PageChangeCacheData)null); yield break; } string who = feed["rc_user_name"].AsText; string whoUri = CleanUriForEmail(pageUri.WithoutPathQueryFragment().At(XUri.EncodeSegment("User:"******"rc_comment"].AsText; string revisionUri = CleanUriForEmail(pageUri.With("revision", change["rc_revision"].AsText)); who = change["rc_user_name"].AsText; whoUri = CleanUriForEmail(pageUri.WithoutPathQueryFragment().At(XUri.EncodeSegment("User:"******"rc_timestamp"].AsText); cacheData.Items.Add(item); } lock(_cache) { // even though we override the entry if one was created in the meantime // we do the existence check so that we don't set up two expiration timers; if(!_cache.ContainsKey(key)) { _cacheItemCallback(key, () => { lock(_cache) { _cache.Remove(key); } }); } _cache[key] = cacheData; } result.Return(cacheData); yield break; }