protected CouchBase(XUri aBaseUri, string aUserName = null, string aPassword = null) {
    if(aBaseUri == null) {
        throw new ArgumentNullException("aBaseUri");
    }
    BasePlug = Plug.New(aBaseUri).WithCredentials(aUserName, aPassword);
}
//--- Methods ---
private void CallbackHandler(Plug plug, string verb, XUri uri, DreamMessage request, Result<DreamMessage> response) {
    if(uri.Segments.Length == 0) {
        response.Return(DreamMessage.Ok());
        return;
    }
    var segments = uri.Segments;
    var wikiId = segments[0];
    if(wikiId.StartsWith("=")) {
        var id = (HostLookupOverride == null) ? DefaultWikiId : HostLookupOverride(wikiId.Substring(1));
        response.Return(DreamMessage.Ok(new XDoc("wiki").Attr("id", id)));
        return;
    }
    if(segments.Length == 2 && segments[1] == "license") {
        XDoc license;
        if(LicenseOverride == null) {
            _log.Debug("returning license from disk");
            license = XDocFactory.LoadFrom(Utils.Settings.LicensePath, MimeType.TEXT_XML);
        } else {
            _log.Debug("returning license from override callback");
            license = LicenseOverride(wikiId);
        }
        response.Return(DreamMessage.Ok(license));
        return;
    }
    var config = (ConfigOverride == null) ? DefaultConfig : ConfigOverride(wikiId);
    response.Return(DreamMessage.Ok(config));
}
private UpdateRecord(UpdateRecord current, XDoc meta) {
    Id = current.Id;
    ActionStack = current.ActionStack;
    Meta = meta;
    WikiId = current.WikiId;
    QueueIds.AddRange(current.QueueIds);
}
public Yield Invoke(Plug plug, string verb, XUri uri, DreamMessage request, Result<DreamMessage> response) {

    // NOTE (steveb): we convert 'xri://@name/path?params' into 'http://xri.net/@name/path?params'

    // prepend segments with authority
    List<string> segments = new List<string>();
    segments.Add(uri.Authority);
    if(uri.Segments != null) {
        segments.AddRange(uri.Segments);
    }

    // build new plug
    List<PlugHandler> preHandlers = (plug.PreHandlers != null) ? new List<PlugHandler>(plug.PreHandlers) : null;
    List<PlugHandler> postHandlers = (plug.PostHandlers != null) ? new List<PlugHandler>(plug.PostHandlers) : null;
    Plug xri = new Plug(new XUri("http", null, null, "xri.net", 80, segments.ToArray(), uri.TrailingSlash, uri.Params, uri.Fragment), plug.Timeout, request.Headers, preHandlers, postHandlers, plug.Credentials, plug.CookieJar, plug.MaxAutoRedirects);

    // add 'Accept' header for 'application/xrds+xml' mime-type
    if((xri.Headers == null) || (xri.Headers.Accept == null)) {
        xri = xri.WithHeader(DreamHeaders.ACCEPT, MimeType.RenderAcceptHeader(MimeType.XRDS));
    }

    // BUGBUGBUG (steveb): this will probably fail in some cases since we may exit this coroutine before the call has completed!
    xri.InvokeEx(verb, request, response);
    yield break;
}
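// NOTE: illustrative only (not part of the original source). A worked example of the rewrite
// performed above, under the assumption that XUri parses the xri scheme like any other URI:
//
//     input:  xri://@example/profile?format=xrds
//     output: http://xri.net/@example/profile?format=xrds
//
// i.e. the XRI authority ('@example') is prepended to the path segments and the host becomes 'xri.net',
// while trailing slash, query parameters, and fragment are carried over unchanged.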
public void Exporter_hits_export_feature_on_creation_using_reltopath() {

    // Arrange
    XUri dekiApiUri = new XUri("http://mock/@api/deki");
    XDoc exportDocument = new XDoc("export")
        .Start("page")
            .Attr("path", "/")
            .Attr("recursive", "true")
            .Attr("exclude", "all")
        .End();
    XDoc exportResponse = new XDoc("export")
        .Start("requests")
        .End()
        .Start("manifest")
            .Elem("justanode")
        .End();
    AutoMockPlug mock = MockPlug.Register(dekiApiUri);
    mock.Expect("POST", dekiApiUri.At("site", "export").With("reltopath", "/foo/bar"), exportDocument, DreamMessage.Ok(exportResponse));

    // Act
    Exporter exporter = Exporter.CreateAsync(Plug.New(dekiApiUri), exportDocument, "/foo/bar", new Result<Exporter>()).Wait();

    // Assert
    Assert.IsTrue(mock.WaitAndVerify(TimeSpan.FromSeconds(1)));
    Assert.AreEqual(exportResponse["manifest"], exporter.Manifest);
}
public static string GetUrlLocalUri(XUri confBaseUri, string url, bool includeQuery, bool decode) {
    if(string.IsNullOrEmpty(url)) {
        return null;
    }
    if(url.StartsWithInvariantIgnoreCase(confBaseUri.ToString())) {

        // remove the wiki path prefix (everything before display, generally)
        url = confBaseUri.SchemeHostPort + url.Substring(confBaseUri.ToString().Length);
    }
    XUri uri = XUri.TryParse(url);
    if(uri == null) {
        return null;
    }
    string ret = uri.Path;
    if(decode) {
        ret = XUri.Decode(ret);
    }
    if(includeQuery && !string.IsNullOrEmpty(uri.QueryFragment)) {
        ret += uri.QueryFragment;
    }
    return ret;
}
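// NOTE: illustrative only (not part of the original source). A worked example with hypothetical
// values; it assumes confBaseUri carries the wiki's path prefix without a trailing slash, so that
// stripping the prefix down to scheme://host:port leaves a leading '/' on the remainder.
string local = GetUrlLocalUri(new XUri("http://wiki.example.com/mywiki"), "http://wiki.example.com/mywiki/display/Some/Page?revision=2", true, false);
// local == "/display/Some/Page?revision=2"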
public AwsEndpoint(string locationConstraint, string s3Uri, string sqsUri, string name) {
    S3Uri = new XUri(s3Uri);
    SqsUri = new XUri(sqsUri);
    LocationConstraint = locationConstraint;
    Name = name;
}
//--- Constructors ---
public AwsEndpoint(string locationConstraint, string s3Uri, string sqsUri) {
    S3Uri = new XUri(s3Uri);
    SqsUri = new XUri(sqsUri);
    LocationConstraint = locationConstraint;
    Name = LocationConstraint ?? "default";
}
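// NOTE: illustrative only (not part of the original source). Constructing endpoints with both
// overloads; the URI values are hypothetical stand-ins for real AWS region endpoints.
var defaultEndpoint = new AwsEndpoint(null, "http://s3.amazonaws.com", "http://queue.amazonaws.com");                      // Name falls back to "default"
var euEndpoint = new AwsEndpoint("EU", "http://s3-eu-west-1.amazonaws.com", "http://eu-west-1.queue.amazonaws.com", "eu"); // explicit name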
// --- Constructors ---
public RemoteInstanceManager(DekiWikiService dekiService, XUri directoryUri) : base(dekiService) {
    _directory = Plug.New(directoryUri);
    DreamMessage testMsg = _directory.GetAsync().Wait();
    if(!testMsg.IsSuccessful) {
        throw new DreamInternalErrorException(string.Format("Error validating remote deki portal service at '{0}'", directoryUri));
    }
}
//--- Class Methods ---
public void RequestLog_Insert(XUri requestUri, string requestVerb, string requestHostHeader, string origin, string serviceHost, string serviceFeature, DreamStatus responseStatus, string username, uint executionTime, string response) {
    string host = requestUri.HostPort;
    if(!host.Contains(":")) {
        host = host + ":80";
    }

    // schema for request log is in "trunk/product/deki/web/maintenance/apirequestlog.sql"
    Catalog.NewQuery(@"/* RequestLog_Insert */
insert delayed into requestlog (
    `rl_requesthost`, `rl_requesthostheader`, `rl_requestpath`, `rl_requestparams`, `rl_requestverb`,
    `rl_dekiuser`, `rl_origin`, `rl_servicehost`, `rl_servicefeature`, `rl_responsestatus`,
    `rl_executiontime`, `rl_response`
) values (
    ?REQUESTHOST, ?REQUESTHOSTHEADER, ?REQUESTPATH, ?REQUESTPARAMS, ?REQUESTVERB,
    ?DEKIUSER, ?ORIGIN, ?SERVICEHOST, ?SERVICEFEATURE, ?RESPONSESTATUS,
    ?EXECUTIONTIME, ?RESPONSE
);")
        .With("REQUESTHOST", host)
        .With("REQUESTHOSTHEADER", requestHostHeader)
        .With("REQUESTPATH", requestUri.Path)
        .With("REQUESTPARAMS", requestUri.Query)
        .With("REQUESTVERB", requestVerb)
        .With("DEKIUSER", username)
        .With("ORIGIN", origin == null ? String.Empty : origin.ToLowerInvariant())
        .With("SERVICEHOST", serviceHost)
        .With("SERVICEFEATURE", serviceFeature)
        .With("RESPONSESTATUS", (int)responseStatus)
        .With("EXECUTIONTIME", executionTime)
        .With("RESPONSE", response)
        .Execute();
}
public void RegisterFunction(string functionName, MethodInfo method, DekiScriptNativeInvocationTarget.Parameter[] parameters) {
    var target = new DekiScriptNativeInvocationTarget(null, method, parameters.ToArray());
    var function = new DekiScriptInvocationTargetDescriptor(target.Access, false, false, functionName, target.Parameters, target.ReturnType, "", "", target);
    var functionPointer = new XUri("native:///").At(function.SystemName);
    Functions[functionPointer] = function;
    _funcMap[functionName] = functionPointer;
}
protected CouchBase(XUri baseUri, string username = null, string password = null) {
    if(baseUri == null) {
        throw new ArgumentNullException("baseUri");
    }
    BasePlug = Plug.New(baseUri).WithCredentials(username, password);
}
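// NOTE: illustrative only (not part of the original source). A minimal sketch of a derived
// client, assuming BasePlug is exposed to subclasses; the class name and the CouchDB
// '_all_dbs' endpoint are hypothetical here.
public class CouchClient : CouchBase {

    public CouchClient(XUri baseUri, string username = null, string password = null) : base(baseUri, username, password) { }

    // every request goes through the credentialed BasePlug built by the base constructor
    public DreamMessage GetAllDatabases() {
        return BasePlug.At("_all_dbs").GetAsync().Wait();
    }
}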
protected override Yield Start(XDoc config, Result result) {
    yield return Coroutine.Invoke(base.Start, config, new Result());
    _timeoutSecs = config["timeout"].AsInt ?? _timeoutSecs;
    _authUri = config["authentication-uri"].AsUri;
    if(_authUri == null) {
        throw new ArgumentNullException("authentication-uri");
    }
    result.Return();
}
public void ExportManager_chains_exporter_to_packager() {

    // Arrange
    XUri dekiApiUri = new XUri("http://mock/@api/deki");
    XDoc exportDocument = new XDoc("export");
    XUri item1Uri = dekiApiUri.At("foo", "bar", "abc");
    XDoc item1Doc = new XDoc("item1");
    XUri item2Uri = dekiApiUri.At("foo", "bar", "def");
    XDoc item2Doc = new XDoc("item2");
    XDoc exportResponse = new XDoc("export")
        .Start("requests")
            .Start("request")
                .Attr("method", "GET")
                .Attr("dataid", "abc")
                .Attr("href", item1Uri)
                .Start("header").Attr("name", "h_1").Attr("value", "v_1").End()
                .Start("header").Attr("name", "h_2").Attr("value", "v_2").End()
            .End()
            .Start("request")
                .Attr("method", "GET")
                .Attr("dataid", "def")
                .Attr("href", item2Uri)
            .End()
        .End()
        .Start("manifest")
            .Start("foo").Attr("dataid", "abc").End()
            .Start("bar").Attr("dataid", "def").End()
        .End();
    AutoMockPlug mock = MockPlug.Register(dekiApiUri);
    mock.Expect().Verb("POST").Uri(dekiApiUri.At("site", "export").With("relto", "0")).RequestDocument(exportDocument).Response(DreamMessage.Ok(exportResponse));
    mock.Expect().Verb("GET").Uri(item1Uri).RequestHeader("h_1", "v_1").RequestHeader("h_2", "v_2").Response(DreamMessage.Ok(item1Doc));
    mock.Expect().Verb("GET").Uri(item2Uri).Response(DreamMessage.Ok(item2Doc));
    var writes = new List<string>();
    var mockPackageWriter = new Mock<IPackageWriter>();
    mockPackageWriter.Setup(x => x.WriteDataAsync(It.IsAny<ExportItem>(), It.IsAny<Result>()))
        .Returns(() => new Result().WithReturn())
        .Callback((ExportItem item, Result result) => writes.Add(item.DataId))
        .AtMost(2)
        .Verifiable();
    mockPackageWriter.Setup(x => x.WriteManifest(It.IsAny<XDoc>(), It.IsAny<Result>()))
        .Returns(() => new Result().WithReturn())
        .AtMostOnce()
        .Verifiable();

    // Act
    ExportManager manager = ExportManager.CreateAsync(Plug.New(dekiApiUri), exportDocument, 0, mockPackageWriter.Object, new Result<ExportManager>()).Wait();
    manager.ExportAsync(new Result()).Wait();

    // Assert
    Assert.IsTrue(mock.WaitAndVerify(TimeSpan.FromSeconds(1)), mock.VerificationFailure);
    Assert.AreEqual(2, manager.TotalItems);
    Assert.AreEqual(2, manager.CompletedItems);
    Assert.AreEqual(new[] { "abc", "def" }, writes.ToArray());
    mockPackageWriter.Verify(x => x.Dispose(), Times.Once());
    mockPackageWriter.VerifyAll();
}
//--- Methods ---
public int GetScoreWithNormalizedUri(XUri uri, out XUri normalized) {
    normalized = uri;
    switch(uri.Scheme) {
    case "resource":
        return Plug.BASE_ENDPOINT_SCORE;
    default:
        return 0;
    }
}
//--- Methods ---
public int GetScoreWithNormalizedUri(XUri uri, out XUri normalized) {
    normalized = uri;
    switch(uri.Scheme) {
    case "xri":
        return 1;
    default:
        return 0;
    }
}
//--- Interface Methods ---
int IPlugEndpoint.GetScoreWithNormalizedUri(XUri uri, out XUri normalized) {
    XDoc doc;
    normalized = uri;
    lock(_map) {
        if(!_map.TryGetValue(uri, out doc)) {
            return 0;
        }
    }
    return uri.MaxSimilarity;
}
//--- Class Methods ---

/// <summary>
/// Add a document for a uri.
/// </summary>
/// <param name="uri">Uri to intercept.</param>
/// <param name="doc">Document to return for interception.</param>
public static void Add(XUri uri, XDoc doc) {
    if(uri == null) {
        throw new ArgumentNullException("uri");
    }
    if(doc == null) {
        throw new ArgumentNullException("doc");
    }
    lock(_map) {
        _map[uri] = doc;
    }
}
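// NOTE: illustrative only (not part of the original source). A minimal usage sketch, assuming
// it runs inside the same interceptor class (its name is not shown in the snippet) and using
// hypothetical uri/document values: once added, a lookup of that exact uri in _map succeeds,
// so GetScoreWithNormalizedUri above reports a match instead of 0.
Add(new XUri("mock://deki/settings"), new XDoc("config").Elem("host", "localhost"));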
//--- Constructors ---
public ParserResult(XDoc content, string contentType, Title redirectsTo, XUri redirectToUri, bool hasScriptContent, List<Title> links, List<Title> templates, List<string> tags) {
    this.Content = content;
    this.ContentType = contentType;
    this.RedirectsToTitle = redirectsTo;
    this.RedirectsToUri = redirectToUri;
    this.HasScriptContent = hasScriptContent;
    this.Links = links;
    this.Templates = templates;
    this.Tags = tags;
    _bodyText = null;
    _summary = null;
}
//--- Constructors ---
public RemoteInstanceManager(DekiWikiService dekiService, TaskTimerFactory timerFactory, XUri directoryUri, string tempPath) : base(dekiService, timerFactory) {

    // validate temp folder
    _tempPath = tempPath;
    if(!Directory.Exists(_tempPath)) {
        throw new ArgumentException("temp folder does not exist", "tempPath");
    }

    // check remote directory
    _directory = Plug.New(directoryUri);
    var testMsg = _directory.GetAsync().Wait();
    if(!testMsg.IsSuccessful) {
        _log.WarnFormat("Error validating remote deki portal service at '{0}'", directoryUri);
    }
}
public override DekiScriptInvocationTargetDescriptor ResolveRegisteredFunctionUri(XUri uri) {
    var result = base.ResolveRegisteredFunctionUri(uri);
    if(result != null) {
        return result;
    }
    var deki = DekiContext.Current.Instance;
    var found = (from extension in deki.RunningServices.ExtensionServices
                 from function in extension.Extension.Functions
                 where function.Uri == uri
                 select function).FirstOrDefault();
    if(found != null) {

        // TODO (steveb): we shouldn't have to create a descriptor on the fly
        return new DekiScriptInvocationTargetDescriptor(DreamAccess.Public, false, false, found.Name, new DekiScriptParameter[0], DekiScriptType.ANY, null, null, null);
    }
    return null;
}
public void ImportManager_chains_reader_to_importer() {

    // Arrange
    var dekiApiUri = new XUri("http://mock/@api/deki");
    var importManifest = new XDoc("manifest");
    var item1Uri = dekiApiUri.At("foo", "bar", "abc");
    var item1Doc = new XDoc("item1");
    var item2Uri = dekiApiUri.At("foo", "bar", "def");
    var item2Doc = new XDoc("item2");
    var importResponse = new XDoc("requests")
        .Start("request")
            .Attr("method", "POST")
            .Attr("dataid", "abc")
            .Attr("href", item1Uri)
            .Start("header").Attr("name", "h_1").Attr("value", "v_1").End()
            .Start("header").Attr("name", "h_2").Attr("value", "v_2").End()
        .End()
        .Start("request")
            .Attr("method", "PUT")
            .Attr("dataid", "def")
            .Attr("href", item2Uri)
        .End();
    var mock = MockPlug.Register(dekiApiUri);
    mock.Expect().Verb("POST").Uri(dekiApiUri.At("site", "import").With("relto", "0")).RequestDocument(importManifest).Response(DreamMessage.Ok(importResponse));
    mock.Expect().Verb("POST").Uri(item1Uri).RequestHeader("h_1", "v_1").RequestHeader("h_2", "v_2").RequestDocument(item1Doc);
    mock.Expect().Verb("PUT").Uri(item2Uri).RequestDocument(item2Doc);
    var mockPackageReader = new Mock<IPackageReader>();
    mockPackageReader.Setup(x => x.ReadManifest(It.IsAny<Result<XDoc>>()))
        .Returns(importManifest.AsResult())
        .Verifiable("didn't get manifest");
    var item1stream = new MemoryStream(item1Doc.ToBytes());
    mockPackageReader.Setup(x => x.ReadData(It.Is<ImportItem>(y => y.DataId == "abc"), It.IsAny<Result<ImportItem>>()))
        .Returns(() => new ImportItem("abc", importResponse["request[@dataid='abc']"], null, item1stream, item1stream.Length).AsResult())
        .Verifiable();
    var item2stream = new MemoryStream(item2Doc.ToBytes());
    mockPackageReader.Setup(x => x.ReadData(It.Is<ImportItem>(y => y.DataId == "def"), It.IsAny<Result<ImportItem>>()))
        .Returns(() => new ImportItem("def", importResponse["request[@dataid='def']"], null, item2stream, item2stream.Length).AsResult())
        .Verifiable();
    mockPackageReader.Setup(x => x.Dispose()).Verifiable();

    // Act
    var manager = ImportManager.CreateAsync(Plug.New(dekiApiUri), 0, mockPackageReader.Object, new Result<ImportManager>()).Wait();
    manager.ImportAsync(new Result()).Wait();

    // Assert
    Assert.IsTrue(mock.WaitAndVerify(TimeSpan.FromSeconds(1)), mock.VerificationFailure);
    mockPackageReader.VerifyAll();
}
//--- Methods ---
public XDoc ToXml(XUri uri) {
    XDoc result = new XDoc("function");
    result.Attr("transform", Transform);
    if(IsProperty) {
        result.Attr("usage", "property");
    }
    result.Elem("name", Name);
    result.Elem("uri", uri);
    result.Elem("description", Description);
    if(Access != DreamAccess.Public) {
        result.Elem("access", Access.ToString().ToLowerInvariant());
    }
    foreach(DekiScriptParameter param in Parameters) {
        param.AppendXml(result);
    }
    result.Start("return").Attr("type", DekiScriptLiteral.AsScriptTypeName(ReturnType)).End();
    return result;
}
public Yield Invoke(Plug plug, string verb, XUri uri, DreamMessage request, Result<DreamMessage> response) {
    DreamMessage reply;

    // we only support GET and HEAD as verbs
    if((verb != Verb.GET) && (verb != Verb.HEAD)) {
        reply = new DreamMessage(DreamStatus.MethodNotAllowed, null, null);
        reply.Headers.Allow = Verb.GET + "," + Verb.HEAD;
    } else {
        bool head = (verb == Verb.HEAD);

        // try to load the assembly
        System.Reflection.Assembly assembly = System.Reflection.Assembly.Load(uri.Host);
        Version version = assembly.GetName().Version;
        DateTime timestamp = new DateTime(2000, 1, 1).AddDays(version.Build).AddSeconds(version.Revision * 2);

        // check if request is just about re-validation
        if(!head && request.CheckCacheRevalidation(timestamp)) {
            reply = DreamMessage.NotModified();
        } else {
            try {
                System.IO.Stream stream = assembly.GetManifestResourceStream(uri.Path.Substring(1));
                if(stream != null) {
                    MimeType mime = MimeType.New(uri.GetParam(DreamOutParam.TYPE, null)) ?? MimeType.BINARY;
                    reply = new DreamMessage(DreamStatus.Ok, null, mime, stream.Length, head ? System.IO.Stream.Null : stream);
                    if(head) {
                        stream.Close();
                    } else {
                        reply.SetCacheMustRevalidate(timestamp);
                    }
                } else {
                    reply = DreamMessage.NotFound("could not find resource");
                }
            } catch(System.IO.FileNotFoundException) {
                reply = DreamMessage.NotFound("could not find resource");
            } catch(Exception e) {
                reply = DreamMessage.InternalError(e);
            }
        }
    }
    response.Return(reply);
    yield break;
}
public void Importer_hits_import_feature_with_reltopath() {

    // Arrange
    XUri dekiApiUri = new XUri("http://mock/@api/deki");
    XDoc importManifest = new XDoc("manifest");
    XDoc importResponse = new XDoc("requests")
        .Start("request").Attr("dataid", "a").End()
        .Start("request").Attr("dataid", "b").End()
        .Start("request").Attr("dataid", "c").End();
    AutoMockPlug mock = MockPlug.Register(dekiApiUri);
    mock.Expect("POST", dekiApiUri.At("site", "import").With("reltopath", "/foo/bar"), importManifest, DreamMessage.Ok(importResponse));

    // Act
    Importer importer = Importer.CreateAsync(Plug.New(dekiApiUri), importManifest, "/foo/bar", new Result<Importer>()).Wait();

    // Assert
    Assert.IsTrue(mock.WaitAndVerify(TimeSpan.FromSeconds(1)));
    Assert.AreEqual(importManifest, importer.Manifest);
    Assert.AreEqual(new[] { "a", "b", "c" }, importer.Items.Select(x => x.DataId).ToArray());
}
private DreamCookie(string name, string value, XUri uri, DateTime expires, int version, bool secure, bool discard, string comment, XUri commentUri, bool httpOnly, bool skipContextDiscovery) {
    if(string.IsNullOrEmpty(name)) {
        throw new ArgumentException("Name cannot be empty");
    }
    _name = name;
    _value = value;
    if(uri != null) {
        _uri = uri.WithoutQuery().WithoutCredentials().WithoutFragment().AsLocalUri();
        if(!skipContextDiscovery) {
            DreamContext dc = DreamContext.CurrentOrNull;
            if(dc != null) {
                _publicUri = dc.PublicUri;
                _localMachineUri = dc.Env.LocalMachineUri;
            }
        }
    }

    // auto-convert very old expiration dates to max since they are most likely bogus
    if(expires.Year < 2000) {
        expires = DateTime.MaxValue;
    }
    if(expires != DateTime.MaxValue) {
        expires = expires.ToUniversalTime();

        // need to trim milliseconds off the passed-in date
        expires = new DateTime(expires.Year, expires.Month, expires.Day, expires.Hour, expires.Minute, expires.Second, 0, DateTimeKind.Utc).ToUniversalTime();
    }

    // initialize cookie
    _expires = expires;
    _version = version;
    _secure = secure;
    _discard = discard;
    _comment = comment;
    _commentUri = commentUri;
    _httpOnly = httpOnly;
}
//--- Constructor ---
public SubscriptionManager(XUri destination, List<Tuplet<string, List<XDoc>>> subscriptions) {
    _destination = destination;
    _recordChangeQueue = new ProcessingQueue<UserInfo>(RecordsChange_Helper, 1);
    _subscriptionChangeQueue = new ProcessingQueue<Empty>(UpdateSubscriptions_Helper, 1);
    if(subscriptions == null) {
        return;
    }
    foreach(Tuplet<string, List<XDoc>> subscription in subscriptions) {
        string wikiId = subscription.Item1;
        SiteInfo siteInfo = new SiteInfo(wikiId);
        _subscriptions.Add(wikiId, siteInfo);
        foreach(XDoc userDoc in subscription.Item2) {
            UserInfo userInfo = UserInfo.FromXDoc(wikiId, userDoc);
            if(userInfo == null) {
                continue;
            }
            lock(siteInfo) {
                siteInfo.Users.Add(userInfo.Id, userInfo);
            }
            userInfo.ResourcesChanged += OnSubscriptionChange;
            userInfo.DataChanged += OnRecordsChange;
        }
    }
}
private Tuplet<MimeType, XDoc> MakeNewsFeed(IEnumerable<RecentChangeEntry> recentchanges, XUri feedUri, string feedTitle, FeedFormat format, DateTime since) {
    var resources = DekiContext.Current.Resources;
    var changes = new List<RecentChangeEntry>();
    DekiContext deki = DekiContext.Current;
    bool diffCacheEnabled = deki.Instance.RecentChangesDiffCaching;

    // check if we need to merge change entries
    MimeType mime = MimeType.XML;
    if((format == FeedFormat.ATOM_DAILY) || (format == FeedFormat.RAW_DAILY)) {

        // combine changes that occurred on the same day
        Dictionary<string, DigestLookupEntry> pageLookup = new Dictionary<string, DigestLookupEntry>();
        Dictionary<string, DigestLookupEntry> commentLookup = new Dictionary<string, DigestLookupEntry>();
        Dictionary<string, ulong> commentDescriptToCommentLookup = new Dictionary<string, ulong>();
        List<Dictionary<string, KeyValuePair<string, int>>> authors = new List<Dictionary<string, KeyValuePair<string, int>>>();
        int index = 0;
        foreach(var change in recentchanges) {
            ulong pageId = change.CurId;
            if(pageId == 0) {

                // should never happen, but if it does, just ignore this entry
                continue;
            }
            DateTime timestamp = change.Timestamp;
            NS ns = change.Namespace;
            RC type = change.Type;
            string author = change.Username;
            string fullname = change.Fullname ?? change.Username;

            // check if we are processing a comment or page change
            if(Utils.IsPageComment(type)) {
                ulong commentId = change.CmntId ?? 0;
                string comment = change.Comment;
                if(commentId == 0) {

                    // NOTE (steveb): because the recentchanges table is brain dead, we sometimes cannot associate a comment change with the comment that affected it;
                    // luckily, when that happens, there is a good chance that the description for the change is the same as an earlier one;
                    // so all we need to do is to look up the previous change using the current change description.
                    if(!commentDescriptToCommentLookup.TryGetValue(comment ?? string.Empty, out commentId)) {
                        continue;
                    }
                } else if(comment != null) {
                    commentDescriptToCommentLookup[comment] = commentId;
                }

                // remove revision number (not applicable)
                change.Revision = 0;

                // check if we need to merge this change with a previous one
                DigestLookupEntry entry;
                string key = string.Format("{0}-{1}", commentId, timestamp.DayOfYear);
                if(commentLookup.TryGetValue(key, out entry)) {
                    var item = changes[entry.Index];
                    ++item.EditCount;

                    // append the change comments
                    if(item.ExtraComments == null) {
                        item.ExtraComments = new List<Tuplet<string, string, string>>();

                        // first add the existing comment to the list
                        item.ExtraComments.Add(new Tuplet<string, string, string>(item.Username, item.Fullname, item.Comment));
                    }
                    item.ExtraComments.Add(new Tuplet<string, string, string>(change.Username, change.Fullname, change.Comment));

                    // update edit count for author
                    KeyValuePair<string, int> authorEdits;
                    authors[entry.Index].TryGetValue(author, out authorEdits);
                    authors[entry.Index][author] = new KeyValuePair<string, int>(fullname, authorEdits.Value + 1);
                } else {
                    change.EditCount = 1;

                    // NOTE (steveb): we always create the lookup to create a discontinuity with previous changes on the same page;
                    // this causes ungroupable changes (e.g. MOVE) to split groupable changes; thus avoiding
                    // that these groupable changes get improperly grouped since they aren't continuous.

                    // create a new entry, either because this page has no existing entry yet, or the change cannot be grouped with other changes
                    commentLookup[key] = new DigestLookupEntry(timestamp, index, type);
                    authors.Add(new Dictionary<string, KeyValuePair<string, int>>());
                    authors[authors.Count - 1].Add(author, new KeyValuePair<string, int>(fullname, 1));
                    changes.Add(change);
                    ++index;
                }
            } else {

                // add a default edit count
                if(change.EditCount == 0) {
                    change.EditCount = Utils.IsPageEdit(type) ? 1 : 0;
                }

                // check if we need to merge this change with a previous one
                DigestLookupEntry entry;
                string key = string.Format("{0}-{1}-{2}", ns, pageId, timestamp.DayOfYear);
                if(pageLookup.TryGetValue(key, out entry) && Utils.IsPageModification(type) && Utils.IsPageModification(entry.Type)) {
                    var item = changes[entry.Index];

                    // update 'rc_last_oldid' to reflect the older page id of the combined records
                    if(Utils.IsPageEdit(type)) {
                        item.LastOldId = change.LastOldId;
                        item.EditCount = item.EditCount + 1;
                        if(change.Revision != 0) {
                            item.PreviousRevision = change.Revision - 1;
                        }
                    }

                    // append the change comments
                    if(item.ExtraComments == null) {
                        item.ExtraComments = new List<Tuplet<string, string, string>>();

                        // first add the existing comment to the list
                        item.ExtraComments.Add(new Tuplet<string, string, string>(item.Username, item.Fullname, item.Comment));
                    }
                    item.ExtraComments.Add(new Tuplet<string, string, string>(change.Username, change.Fullname, change.Comment));

                    // update edit count for author
                    KeyValuePair<string, int> authorEdits;
                    authors[entry.Index].TryGetValue(author, out authorEdits);
                    authors[entry.Index][author] = new KeyValuePair<string, int>(fullname, authorEdits.Value + 1);
                } else {

                    // NOTE (steveb): we always create the lookup to create a discontinuity with previous changes on the same page;
                    // this causes ungroupable changes (e.g. MOVE) to split groupable changes; thus avoiding
                    // that these groupable changes get improperly grouped since they aren't continuous.

                    // create a new entry, either because this page has no existing entry yet, or the change cannot be grouped with other changes
                    pageLookup[key] = new DigestLookupEntry(timestamp, index, type);
                    authors.Add(new Dictionary<string, KeyValuePair<string, int>>());
                    authors[authors.Count - 1].Add(author, new KeyValuePair<string, int>(fullname, 1));

                    // check if page was changed
                    if(Utils.IsPageEdit(type)) {

                        // update previous revision number
                        change.PreviousRevision = change.Revision - 1;
                    } else if(Utils.IsPageModification(type)) {

                        // set previous revision number
                        change.PreviousRevision = change.Revision;
                    }
                    changes.Add(change);
                    ++index;
                }
            }
        }

        // create list of authors as comment line
        for(int i = 0; i < changes.Count; ++i) {
            var change = changes[i];

            // create an array of (fullname, username) author names
            var sortedAuthors = (from author in authors[i] select new KeyValuePair<string, string>(author.Key, author.Value.Key)).ToList();
            sortedAuthors.Sort((x, y) => StringComparer.OrdinalIgnoreCase.Compare(x.Value, y.Value));
            string authorList = Utils.LinguisticJoin(from author in sortedAuthors select (string.IsNullOrEmpty(author.Value) ? author.Key : author.Value), resources.Localize(DekiResources.AND()));

            // add up all edit operations
            int editTotal = 0;
            foreach(KeyValuePair<string, int> edits in authors[i].Values) {
                editTotal += edits.Value;
            }

            // reset comment for standard edits
            RC type = change.Type;
            if(Utils.IsPageModification(type) || Utils.IsPageComment(type)) {
                string summary = null;
                switch(editTotal) {
                case 2:
                    summary = resources.Localize(DekiResources.EDIT_SUMMARY_TWO(authorList, editTotal));
                    break;
                case 1:
                    summary = resources.Localize(DekiResources.EDIT_SUMMARY_ONE(authorList, editTotal));
                    break;
                case 0:
                    break;
                default:
                    summary = resources.Localize(DekiResources.EDIT_SUMMARY_MANY(authorList, editTotal));
                    break;
                }
                change.Summary = summary;
            }

            // reflect that multiple authors edited article, if appropriate
            change.SortedAuthors = sortedAuthors;
        }

        // check if only the digest format was requested
        if(format == FeedFormat.RAW_DAILY) {
            XDoc digest = new XDoc("digest");
            foreach(var change in changes) {
                change.AppendXml(digest);
            }
            return new Tuplet<MimeType, XDoc>(mime, digest);
        }
    } else if(format == FeedFormat.ATOM_ALL) {

        // keep all changes
        foreach(var change in recentchanges) {
            if(Utils.IsPageEdit(change.Type)) {
                change.PreviousRevision = change.Revision - 1;
            } else {
                change.Revision = 0;
            }
            changes.Add(change);
        }
    } else if(format == FeedFormat.DAILY_ACTIVITY) {

        // need to establish how many pages and users exist in total
        var pagesTotal = (int)DbUtils.CurrentSession.Pages_GetCount();
        var usersTotal = (int)DbUtils.CurrentSession.Users_GetCount();

        // daily activity format
        XDoc table = new XDoc("activity").Attr("type", "daily");
        DateTime missing = DateTime.UtcNow.Date;
        foreach(var change in
            from recentchange in recentchanges
            where (recentchange.Namespace == NS.MAIN) || (recentchange.Namespace == NS.USER)
            group recentchange by recentchange.Timestamp.Date into recentchangesByDate
            select new {
                Date = recentchangesByDate.Key,

                // count as edited pages, pages that were not created or deleted the same day
                PagesEdited = recentchangesByDate.Where(rc => (rc.Type == RC.EDIT) && !recentchangesByDate.Any(rc2 => (rc.CurId == rc2.CurId) && ((rc2.Type == RC.NEW) || (rc2.Type == RC.PAGERESTORED) || (rc.Type == RC.PAGEDELETED)))).Distinct(rc => rc.CurId).Count(),

                // count as created pages, pages that were not deleted later the same day
                PagesCreated = recentchangesByDate.Count(rc => ((rc.Type == RC.NEW) || (rc.Type == RC.PAGERESTORED)) && !recentchangesByDate.Any(rc2 => (rc2.CurId == rc.CurId) && (rc2.Id < rc.Id) && (rc.Type == RC.PAGEDELETED))),

                // count as deleted pages, pages that were not created or restored earlier the same day
                PagesDeleted = recentchangesByDate.Count(rc => (rc.Type == RC.PAGEDELETED) && !recentchangesByDate.Any(rc2 => (rc.CurId == rc2.CurId) && (rc2.Id > rc.Id) && ((rc2.Type == RC.NEW) || (rc2.Type == RC.PAGERESTORED)))),

                // simple counting of created users
                UsersCreated = recentchangesByDate.Count(rc => rc.Type == RC.USER_CREATED)
            }
        ) {

            // check if we need to add empty entries for missing days
            for(; missing > change.Date; missing = missing.AddDays(-1)) {
                table.Start("entry").Attr("date", missing)
                    .Elem("pages.total", pagesTotal)
                    .Elem("pages.created", 0)
                    .Elem("pages.edited", 0)
                    .Elem("pages.deleted", 0)
                    .Elem("users.total", usersTotal)
                    .Elem("users.created", 0)
                .End();
            }

            // add this day's entry
            table.Start("entry").Attr("date", change.Date)
                .Elem("pages.total", pagesTotal)
                .Elem("pages.created", change.PagesCreated)
                .Elem("pages.edited", change.PagesEdited)
                .Elem("pages.deleted", change.PagesDeleted)
                .Elem("users.total", usersTotal)
                .Elem("users.created", change.UsersCreated)
            .End();

            // NOTE (steveb): pages total might become negative if user created didn't actually create a user page
            pagesTotal -= change.PagesCreated - change.PagesDeleted + change.UsersCreated;
            usersTotal -= change.UsersCreated;

            // indicate that the current day is *not* missing
            missing = change.Date.AddDays(-1);
        }

        // pad with missing records
        for(; missing >= since; missing = missing.AddDays(-1)) {
            table.Start("entry").Attr("date", missing)
                .Elem("pages.total", pagesTotal)
                .Elem("pages.created", 0)
                .Elem("pages.edited", 0)
                .Elem("pages.deleted", 0)
                .Elem("users.total", usersTotal)
                .Elem("users.created", 0)
            .End();
        }
        return new Tuplet<MimeType, XDoc>(mime, table);
    } else {

        // unknown or RAW format
        XDoc table = new XDoc("table");
        foreach(var change in recentchanges) {
            change.AppendXml(table);
        }
        return new Tuplet<MimeType, XDoc>(mime, table);
    }

    // compose feed document
    mime = MimeType.ATOM;
    XAtomFeed feed = new XAtomFeed(feedTitle, feedUri, DateTime.UtcNow) { Language = deki.Instance.SiteLanguage, Id = feedUri };
    Dictionary<string, XDoc> cache = new Dictionary<string, XDoc>();
    foreach(var change in changes) {
        RC type = change.Type;
        if(Utils.IsPageHiddenOperation(type)) {

            // no real content to produce; let's skip it
            continue;
        }

        // build feed content
        Title title = Title.FromDbPath(change.Namespace, change.Title, null);
        XDoc description = new XDoc("div");
        AppendDiff(diffCacheEnabled, description, change, type, title, cache);

        // add item to feed
        try {
            DateTime timestamp = change.Timestamp;
            XAtomEntry entry = feed.StartEntry(title.AsPrefixedUserFriendlyPath(), timestamp, timestamp);
            XUri id = XUri.TryParse(Utils.AsPublicUiUri(title));
            if(id != null) {
                if(id.Segments.Length == 0) {
                    id = id.WithTrailingSlash();
                }
                entry.Id = id.WithFragment(DbUtils.ToString(change.Timestamp));
            }
            entry.AddAuthor(((change.SortedAuthors == null) || (change.SortedAuthors.Count == 1)) ? (string.IsNullOrEmpty(change.Fullname) ? change.Username : change.Fullname) : resources.Localize(DekiResources.EDIT_MULTIPLE()), null, null);
            entry.AddLink(new XUri(Utils.AsPublicUiUri(title)), XAtomBase.LinkRelation.Alternate, null, null, null);
            entry.AddSummary(MimeType.XHTML, description);
            feed.End();
        } catch(Exception e) {
            _log.ErrorExceptionMethodCall(e, "MakeNewsFeed", title.AsPrefixedDbPath());
        }
    }

    // insert <ins> styles
    foreach(XDoc ins in feed[".//ins"]) {
        ins.Attr("style", "color: #009900;background-color: #ccffcc;text-decoration: none;");
    }

    // insert <del> styles
    foreach(XDoc del in feed[".//del"]) {
        del.Attr("style", "color: #990000;background-color: #ffcccc;text-decoration: none;");
    }
    return new Tuplet<MimeType, XDoc>(mime, feed);
}
private Tuplet<MimeType, XDoc> MakeNewsFeedCached(Func<IEnumerable<RecentChangeEntry>> recentchanges, XUri feedUri, string feedTitle, string feedName, List<string> feedNameSuffixes, FeedFormat format, DateTime since) {
    DekiContext deki = DekiContext.Current;
    TimeSpan feedCacheTtl = deki.Instance.RecentChangesFeedCachingTtl;

    // cache the feed if caching is enabled, an ATOM format is requested, and the user is not logged in
    if((feedCacheTtl > TimeSpan.Zero) && ((format == FeedFormat.ATOM_ALL) || (format == FeedFormat.ATOM_DAILY)) && UserBL.IsAnonymous(deki.User)) {

        // compute complete feed name
        if(feedNameSuffixes.Count > 0) {
            feedName += "(" + string.Join(",", feedNameSuffixes.ToArray()) + ")";
        }
        feedName += ".xml";

        // check if there is a cached version of the feed
        Plug store = Storage.At("site_" + XUri.EncodeSegment(DekiContext.Current.Instance.Id), DreamContext.Current.Culture.Name, "users", string.Format("user_{0}", DekiContext.Current.User.ID), feedName);
        var v = store.Get(new Result<DreamMessage>(TimeSpan.MaxValue)).Wait();
        XDoc cachedFeed = (v.IsSuccessful && v.HasDocument) ? v.ToDocument() : null;
        if(cachedFeed != null) {

            // let's validate the timestamp on the feed as well (just in case the cache storage didn't remove the item)
            DateTime now = DateTime.UtcNow;
            DateTime updated = cachedFeed["_:updated"].AsDate ?? now;
            if(now.Subtract(updated) < feedCacheTtl) {
                return new Tuplet<MimeType, XDoc>(MimeType.ATOM, cachedFeed);
            }
        }
        var result = MakeNewsFeed(recentchanges(), feedUri, feedTitle, format, since);
        if(!result.Item2.IsEmpty) {
            store.With("ttl", feedCacheTtl.TotalSeconds).Put(result.Item2, new Result<DreamMessage>(TimeSpan.MaxValue)).Block();
        }
        return result;
    }
    return MakeNewsFeed(recentchanges(), feedUri, feedTitle, format, since);
}
public XDoc Meter(
    [DekiExtParam("meter width")] int width,
    [DekiExtParam("meter height")] int height,
    [DekiExtParam("meter position (between 0 and 100)")] int value,
    [DekiExtParam("meter label (default: none)", true)] string label,
    [DekiExtParam("meter colors (e.g. [ \"ff0000\", \"00ff00\", \"0000ff\" ]; default: nil)", true)] ArrayList colors
) {
    XUri uri = new XUri("http://chart.apis.google.com/chart")
        .With("chs", string.Format("{0}x{1}", width, height))
        .With("cht", "gom")
        .With("chd", "t:" + Math.Max(0, Math.Min(value, 100)));
    if(!string.IsNullOrEmpty(label)) {
        uri = uri.With("chl", label);
    }
    if(colors != null) {
        uri = uri.With("chco", string.Join(",", AsStrings(colors)));
    }
    return new XDoc("html").Start("body").Start("img").Attr("src", uri).End().End();
}
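// NOTE: illustrative only (not part of the original source). For a hypothetical call such as
//
//     Meter(200, 125, 75, "CPU", null);
//
// the generated <img> source would be the Google-o-meter chart URL
//
//     http://chart.apis.google.com/chart?chs=200x125&cht=gom&chd=t:75&chl=CPU
//
// with the value clamped into the 0-100 range by the Math.Max/Math.Min pair above.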