/// <summary>
/// Creates a list of <see cref="WikiPage"/> based on JSON query result.
/// </summary>
/// <param name="site">A <see cref="Site"/> object.</param>
/// <param name="queryNode">The <c>query</c> node value object of JSON result.</param>
/// <param name="options">The query provider used to populate the retrieved pages.</param>
/// <returns>Retrieved pages.</returns>
internal static IList<WikiPage> FromJsonQueryResult(WikiSite site, JObject queryNode, IWikiPageQueryProvider options)
{
    if (site == null) throw new ArgumentNullException(nameof(site));
    if (queryNode == null) throw new ArgumentNullException(nameof(queryNode));
    var jpages = (JObject)queryNode["pages"];
    if (jpages == null) return EmptyPages;
    // If query.xxx.index exists, sort the pages by the given index.
    // This is specifically used with SearchGenerator, to keep the search result in order.
    // For other generators, this property simply does not exist.
    // See https://www.mediawiki.org/wiki/API_talk:Query#On_the_order_of_titles_taken_out_of_generator .
    var result = new List<WikiPage>();
    foreach (var jpage in jpages.Properties().OrderBy(p => (int?)p.Value["index"]))
    {
        var page = new WikiPage(site, 0);
        MediaWikiHelper.PopulatePageFromJson(page, (JObject)jpage.Value, options);
        result.Add(page);
    }
    return result;
}
/// <inheritdoc />
public override IEnumerable<KeyValuePair<string, object>> EnumListParameters()
{
    var dict = new Dictionary<string, object>
    {
        { "srsearch", Keyword },
        { "srnamespace", NamespaceIds == null ? "*" : MediaWikiHelper.JoinValues(NamespaceIds) },
        // "srwhat" is assigned below, mapped from MatchingField.
        // (Previously the raw enum value was stored here first, only to be
        // unconditionally overwritten — the redundant entry has been removed.)
        { "srlimit", PaginationSize },
        { "srinterwiki", IncludesInterwiki },
        { "srbackend", BackendName }
    };
    // srredirects was removed in MediaWiki 1.23; from 1.23 onwards,
    // redirects are always included in the search results.
    if (Site.SiteInfo.Version < new MediaWikiVersion(1, 23))
    {
        dict["srredirects"] = true;
    }
    dict["srwhat"] = MatchingField switch
    {
        SearchableField.Title => "title",
        SearchableField.Text => "text",
        SearchableField.NearMatch => "nearmatch",
        _ => throw new ArgumentOutOfRangeException(nameof(MatchingField), MatchingField, null)
    };
    return dict;
}
/// <inheritdoc />
/// <remarks>
/// To logout the user, this override sends a POST request to <c>https://www.wikia.com/logout</c>.
/// </remarks>
protected override async Task SendLogoutRequestAsync()
{
    string logoutUrl;
    var serverUrl = SiteInfo.ServerUrl;
    if (serverUrl.EndsWith(".wikia.com", StringComparison.OrdinalIgnoreCase))
    {
        logoutUrl = "https://www.wikia.com/logout";
    }
    else if (serverUrl.EndsWith(".wikia.org", StringComparison.OrdinalIgnoreCase))
    {
        logoutUrl = "https://www.wikia.org/logout";
    }
    else if (serverUrl.EndsWith(".fandom.com", StringComparison.OrdinalIgnoreCase))
    {
        logoutUrl = "https://www.fandom.com/logout";
    }
    else
    {
        // The server URL does not belong to a known FANDOM domain;
        // fall back to a /logout endpoint relative to the site root.
        logoutUrl = MediaWikiHelper.MakeAbsoluteUrl(serverUrl, "logout");
        this.Logger.LogWarning(
            "WikiaSite is instantiated with a non-FANDOM site URL: {Url}. Assuming logout URL as {LogoutUrl}.",
            serverUrl, logoutUrl);
    }
    await WikiClient.InvokeAsync(logoutUrl,
        new MediaWikiFormRequestMessage(new { redirect = "" }),
        DiscardingResponseMessageParser.Instance,
        CancellationToken.None);
}
/// <inheritdoc />
public override IEnumerable<KeyValuePair<string, object>> EnumParameters(MediaWikiVersion version)
{
    // ppprop restricts which page properties to fetch; null means "all".
    var selected = SelectedProperties;
    return new OrderedKeyValuePairs<string, object>
    {
        { "ppprop", selected == null ? null : MediaWikiHelper.JoinValues(selected) }
    };
}
/// <summary>
/// Builds the recentchanges API parameters, either for list mode or for generator mode.
/// </summary>
/// <param name="isList">Whether the parameters are for the list module (no prefix) rather than the generator (g-prefix).</param>
private IEnumerable<KeyValuePair<string, object>> EnumParams(bool isList)
{
    // Generator parameter names carry a "g" prefix; list parameter names do not.
    var p = isList ? null : "g";
    var parameters = new Dictionary<string, object>
    {
        { p + "rcdir", TimeAscending ? "newer" : "older" },
        { p + "rcstart", StartTime },
        { p + "rcend", EndTime },
        { p + "rcnamespace", NamespaceIds == null ? null : MediaWikiHelper.JoinValues(NamespaceIds) },
        { p + "rcuser", UserName },
        { p + "rcexcludeuser", ExcludedUserName },
        { p + "rctag", Tag },
        { p + "rctype", ParseRecentChangesTypes(TypeFilters) },
        { p + "rcshow", ParseFilters() },
        { p + "rctoponly", LastRevisionsOnly },
        { p + "rclimit", PaginationSize }
    };
    if (isList)
    {
        // rcprop is only applicable to the list module.
        var props = "user|userid|comment|parsedcomment|flags|timestamp|title|ids|sizes|redirect|loginfo|tags|sha1";
        if (Site.AccountInfo.HasRight(UserRights.Patrol))
        {
            // "patrolled" requires the patrol right; requesting it without the right errors out.
            props += "|patrolled";
        }
        parameters.Add("rcprop", props);
    }
    return parameters;
}
/// <summary>
/// Populates this page instance from the page JSON node of a query result:
/// refreshes the page stub, re-parses the property groups, and updates the
/// revision/file/page-info shortcut properties.
/// </summary>
/// <param name="jpage">The JSON node for a single page in the query result.</param>
/// <param name="options">The query provider used to parse the property groups.</param>
protected internal virtual void OnLoadPageInfo(JObject jpage, IWikiPageQueryProvider options)
{
    // Initialize: drop any property groups from a previous load.
    propertyGroups?.Clear();
    // Update page stub
    PageStub = MediaWikiHelper.PageStubFromJson(jpage);
    // Load page info
    // Invalid page title (like File:) — nothing further to load.
    if (PageStub.IsInvalid) { return; }
    // Load property groups
    foreach (var group in options.ParsePropertyGroups(jpage))
    {
        Debug.Assert(group != null, "The returned sequence from IWikiPageQueryParameters.ParsePropertyGroups contains null item.");
        // Lazily allocate the backing list only when there is at least one group.
        if (propertyGroups == null) { propertyGroups = new List<IWikiPagePropertyGroup>(); }
        propertyGroups.Add(group);
    }
    // Check if the client has requested for revision content…
    LastRevision = GetPropertyGroup<RevisionsPropertyGroup>()?.LatestRevision;
    if (LastRevision?.Content != null) { Content = LastRevision.Content; }
    LastFileRevision = GetPropertyGroup<FileInfoPropertyGroup>()?.LatestRevision;
    // Cache page-info shortcuts; 0 / null when the page-info group is absent.
    pageInfo = GetPropertyGroup<PageInfoPropertyGroup>();
    LastRevisionId = pageInfo?.LastRevisionId ?? 0;
    ContentModel = pageInfo?.ContentModel;
}
/// <inheritdoc />
public override IEnumerable<KeyValuePair<string, object>> EnumListParameters()
{
    // Exactly one of TargetTitle / TargetPageId must be set.
    if ((TargetTitle != null) == (TargetPageId != null))
    {
        throw new ArgumentException(string.Format(Prompts.ExceptionArgumentExpectEitherNull2,
            nameof(TargetTitle), nameof(TargetPageId)));
    }
    var limit = PaginationSize;
    if (AllowRedirectedLinks)
    {
        // When the blredirect parameter is set, this module behaves slightly differently.
        // bllimit applies to both levels separately: if e.g. bllimit=10,
        // at most 10 first-level pages (pages that link to bltitle) and
        // 10 second-level pages (pages that link to bltitle through a redirect) will be listed.
        // Continuing queries also works differently.
        limit = Math.Max(1, PaginationSize / 2);
    }
    return new Dictionary<string, object>
    {
        { "bltitle", TargetTitle },
        { "blpageid", TargetPageId },
        { "blnamespace", NamespaceIds == null ? null : MediaWikiHelper.JoinValues(NamespaceIds) },
        { "blfilterredir", RedirectsFilter.ToString("redirects", "nonredirects") },
        { "bllimit", limit },
        { "blredirect", AllowRedirectedLinks }
    };
}
/// <summary>
/// Construct a sequence of <see cref="WikiPageStub"/> from the given page IDs.
/// </summary>
/// <param name="site">The site in which to query for the pages.</param>
/// <param name="ids">The page IDs to query.</param>
/// <exception cref="ArgumentNullException">Either <paramref name="site"/> or <paramref name="ids"/> is <c>null</c>.</exception>
/// <returns>A sequence of <see cref="WikiPageStub"/> containing the page information.</returns>
/// <remarks>For how the missing pages are handled, see the "remarks" section of <see cref="WikiPage"/>.</remarks>
public static IAsyncEnumerable<WikiPageStub> FromPageIds(WikiSite site, IEnumerable<int> ids)
{
    return AsyncEnumerableFactory.FromAsyncGenerator<WikiPageStub>(async (sink, ct) =>
    {
        // Accounts with apihighlimits may batch up to 500 IDs per request; others 50.
        var batchSize = site.AccountInfo.HasRight(UserRights.ApiHighLimits) ? 500 : 50;
        foreach (var batch in ids.Partition(batchSize))
        {
            var jresult = await site.InvokeMediaWikiApiAsync(new MediaWikiFormRequestMessage(new
            {
                action = "query",
                pageids = MediaWikiHelper.JoinValues(batch),
            }), ct);
            Debug.Assert(jresult["query"] != null);
            var jpages = jresult["query"]["pages"];
            foreach (var id in batch)
            {
                var jpage = jpages[id.ToString(CultureInfo.InvariantCulture)];
                // Pages flagged "missing" yield a stub with placeholder title/namespace.
                sink.Yield(jpage["missing"] == null
                    ? new WikiPageStub(id, (string)jpage["title"], (int)jpage["ns"])
                    : new WikiPageStub(id, MissingPageTitle, UnknownNamespaceId));
            }
            await sink.Wait();
        }
    });
}
// Invoked by the serializer once deserialization of this instance completes.
// NOTE(review): the method's closing brace lies outside this chunk.
private void OnDeserialized(StreamingContext context)
{
    // "duplicateversions" (when present and non-empty) lists other versions of
    // the same file; only their timestamps are extracted here.
    if (GetValueDirect("duplicateversions") is JArray jversions && jversions.Count > 0)
    {
        var versions = jversions.Select(v => MediaWikiHelper.ParseDateTime((string)v["timestamp"])).ToList();
        // Expose the timestamps as a read-only collection.
        DuplicateVersions = new ReadOnlyCollection<DateTime>(versions);
    }
/// <inheritdoc />
public override IEnumerable<KeyValuePair<string, object>> EnumListParameters()
{
    // Use object? values to match the declared KeyValuePair<string, object?>
    // return type: tlnamespace/tltemplates are intentionally null when unset,
    // and null-valued pairs are excluded when the query is serialized.
    return new Dictionary<string, object?>
    {
        { "tlnamespace", NamespaceIds == null ? null : MediaWikiHelper.JoinValues(NamespaceIds) },
        { "tllimit", PaginationSize },
        { "tltemplates", MatchingTitles == null ? null : MediaWikiHelper.JoinValues(MatchingTitles) },
        { "tldir", OrderDescending ? "descending" : "ascending" }
    };
}
/// <summary>
/// Convert name-value pairs to URL query format.
/// This overload handles <see cref="ExpandoObject"/> as well as anonymous objects.
/// </summary>
/// <remarks>
/// <para>
/// The key-value pair with null value will be excluded. To specify a key with empty value,
/// consider using <see cref="string.Empty"/> .
/// </para>
/// <para>
/// For <see cref="bool"/> values, if the value is true, a pair with key and empty value
/// will be generated; otherwise the whole pair will be excluded.
/// </para>
/// <para>
/// If <paramref name="values"/> is <see cref="IEnumerable{T}"/> of <see cref="KeyValuePair{TKey,TValue}"/>
/// of strings, the values will be returned with no further processing.
/// </para>
/// </remarks>
public static IEnumerable<KeyValuePair<string, string>> ToWikiStringValuePairs(object values)
{
    // Fast path: already a string/string pair sequence — pass through untouched.
    if (values is IEnumerable<KeyValuePair<string, string>> stringPairs)
        return stringPairs;
    // Otherwise enumerate the members and convert each value to wiki query form.
    return MediaWikiHelper.EnumValues(values)
        .Select(pair => new KeyValuePair<string, string>(pair.Key, ToWikiQueryValue(pair.Value)));
}
/// <inheritdoc />
/// <param name="fieldCollection">A dictionary or anonymous object containing the key-value pairs. See <see cref="MediaWikiHelper.EnumValues"/> for more information.</param>
/// <param name="forceMultipartFormData">Forces the message to be marshaled as multipart/form-data, regardless of the fields.</param>
/// <exception cref="ArgumentNullException"><paramref name="fieldCollection"/> is <c>null</c>.</exception>
public MediaWikiFormRequestMessage(string id, object fieldCollection, bool forceMultipartFormData) : base(id)
{
    if (fieldCollection == null)
        throw new ArgumentNullException(nameof(fieldCollection));
    fields = new List<KeyValuePair<string, object>>(MediaWikiHelper.EnumValues(fieldCollection));
    // Stream-valued fields can only be marshaled as multipart/form-data.
    var containsStream = false;
    foreach (var field in fields)
    {
        if (field.Value is Stream)
        {
            containsStream = true;
            break;
        }
    }
    AsMultipartFormData = forceMultipartFormData || containsStream;
}
/// <inheritdoc />
public override IEnumerable<KeyValuePair<string, object>> EnumListParameters()
{
    // Parameters for list=embeddedin; null values are dropped at serialization.
    return new Dictionary<string, object>
    {
        ["eititle"] = TargetTitle,
        ["einamespace"] = NamespaceIds == null ? null : MediaWikiHelper.JoinValues(NamespaceIds),
        ["eifilterredir"] = RedirectsFilter.ToString("redirects", "nonredirects"),
        ["eilimit"] = PaginationSize
    };
}
// Regression test for issue #67: CategoriesGenerator with multiple joined page titles.
public async Task Issue67()
{
    var site = await WpEnSiteAsync;
    var generator = new CategoriesGenerator(site)
    {
        PageTitle = MediaWikiHelper.JoinValues(new[] { "Test", ".test", "Test_(Unix)", "Test_(assessment)" }),
    };
    var items = await generator.EnumItemsAsync().ToListAsync();
    ShallowTrace(items);
}
/// <summary>Initializes a new <see cref="WikiaSiteOptions"/> instance from the root URL of a Wikia site.</summary>
/// <param name="siteRootUrl">Wikia site root URL, with the ending slash. e.g. <c>http://community.wikia.com/</c>.</param>
/// <exception cref="ArgumentNullException"><paramref name="siteRootUrl"/> is <c>null</c>.</exception>
public WikiaSiteOptions(string siteRootUrl)
{
    if (siteRootUrl == null)
        throw new ArgumentNullException(nameof(siteRootUrl));
    // All endpoints are derived from the site root.
    ScriptUrl = MediaWikiHelper.MakeAbsoluteUrl(siteRootUrl, "index.php");
    ApiEndpoint = MediaWikiHelper.MakeAbsoluteUrl(siteRootUrl, "api.php");
    NirvanaEndPointUrl = MediaWikiHelper.MakeAbsoluteUrl(siteRootUrl, "wikia.php");
    WikiaApiRootUrl = MediaWikiHelper.MakeAbsoluteUrl(siteRootUrl, "api/v1");
}
// Resolves UserPageUrl / AvatarUrl against the given base path, leaving unset values untouched.
internal void ApplyBasePath(string basePath)
{
    if (UserPageUrl is { } userPageUrl)
        UserPageUrl = MediaWikiHelper.MakeAbsoluteUrl(basePath, userPageUrl);
    if (AvatarUrl is { } avatarUrl)
        AvatarUrl = MediaWikiHelper.MakeAbsoluteUrl(basePath, avatarUrl);
}
// Resolves Url / ImageUrl against the given base path, leaving unset values untouched.
internal void ApplyBasePath(string basePath)
{
    if (Url is { } url)
        Url = MediaWikiHelper.MakeAbsoluteUrl(basePath, url);
    if (ImageUrl is { } imageUrl)
        ImageUrl = MediaWikiHelper.MakeAbsoluteUrl(basePath, imageUrl);
}
/// <summary>
/// Refresh a sequence of revisions by revid, along with their owner pages.
/// </summary>
/// <param name="site">The site to fetch the revisions from.</param>
/// <param name="revIds">The revision IDs to fetch.</param>
/// <param name="options">The query provider whose parameters are reused for the fetch (its rvlimit, if any, is stripped).</param>
/// <param name="cancellationToken">A token used to cancel the operation.</param>
/// <remarks>
/// <para>If there's invalid revision id in <paramref name="revIds"/>, an <see cref="ArgumentException"/> will be thrown while enumerating.</para>
/// </remarks>
public static IAsyncEnumerable<Revision> FetchRevisionsAsync(WikiSite site, IEnumerable<int> revIds, IWikiPageQueryProvider options, CancellationToken cancellationToken)
{
    if (revIds == null) { throw new ArgumentNullException(nameof(revIds)); }
    var queryParams = options.EnumParameters(site.SiteInfo.Version).ToDictionary();
    // Remove any rvlimit magic word generated by RevisionsPropertyProvider.
    // We are only fetching by revisions.
    queryParams.Remove("rvlimit");
    var titleLimit = options.GetMaxPaginationSize(site.SiteInfo.Version, site.AccountInfo.HasRight(UserRights.ApiHighLimits));
    return (AsyncEnumerableFactory.FromAsyncGenerator<Revision>(async sink =>
    {
        // Page ID --> Page Stub
        var stubDict = new Dictionary<int, WikiPageStub>();
        var revDict = new Dictionary<int, Revision>();
        using (site.BeginActionScope(null, (object)revIds))
        {
            foreach (var partition in revIds.Partition(titleLimit))
            {
                site.Logger.LogDebug("Fetching {Count} revisions from {Site}.", partition.Count, site);
                queryParams["revids"] = MediaWikiHelper.JoinValues(partition);
                var jobj = await site.InvokeMediaWikiApiAsync(new MediaWikiFormRequestMessage(queryParams), cancellationToken);
                var jpages = (JObject)jobj["query"]["pages"];
                // Generate stubs first
                foreach (var p in jpages)
                {
                    var jrevs = p.Value["revisions"];
                    if (jrevs == null || !jrevs.HasValues) { continue; }
                    var id = Convert.ToInt32(p.Key);
                    // One stub per owner page, shared by all of its revisions.
                    if (!stubDict.TryGetValue(id, out var stub))
                    {
                        stub = new WikiPageStub(id, (string)p.Value["title"], (int)p.Value["ns"]);
                        stubDict.Add(id, stub);
                    }
                    foreach (var jrev in jrevs)
                    {
                        var rev = jrev.ToObject<Revision>(Utility.WikiJsonSerializer);
                        rev.Page = stub;
                        revDict.Add(rev.Id, rev);
                    }
                }
                // Yield in the caller-requested order; revids absent from the
                // response are surfaced as null items.
                await
                    sink.YieldAndWait(partition.Select(id => revDict.TryGetValue(id, out var rev) ? rev : null));
            }
        }
    }));
}
/// <summary>Initializes a new <see cref="WikiaSiteOptions"/> instance from the information in <see cref="WikiSite"/>.</summary>
/// <param name="site">The existing wiki site instance.</param>
/// <exception cref="ArgumentNullException"><paramref name="site"/> is <c>null</c>.</exception>
public WikiaSiteOptions(WikiSite site)
{
    if (site == null)
        throw new ArgumentNullException(nameof(site));
    // Reuse the API endpoint as-is; derive the other endpoints from the server URL.
    ApiEndpoint = site.ApiEndpoint;
    var info = site.SiteInfo;
    ScriptUrl = MediaWikiHelper.MakeAbsoluteUrl(info.ServerUrl, info.ScriptFilePath);
    NirvanaEndPointUrl = MediaWikiHelper.MakeAbsoluteUrl(info.ServerUrl, "wikia.php");
    WikiaApiRootUrl = MediaWikiHelper.MakeAbsoluteUrl(info.ServerUrl, "api/v1");
}
private RevisionsPropertyGroup(WikiPageStub page, JArray jrevisions)
{
    // A single revision is stored unwrapped; multiple revisions are wrapped
    // in a read-only collection.
    if (jrevisions.Count != 1)
    {
        var revisions = jrevisions
            .Select(jr => MediaWikiHelper.RevisionFromJson((JObject)jr, page))
            .ToArray();
        _Revisions = new ReadOnlyCollection<Revision>(revisions);
    }
    else
    {
        _Revisions = MediaWikiHelper.RevisionFromJson((JObject)jrevisions.First, page);
    }
}
/// <inheritdoc />
/// <summary>
/// Initializes a <see cref="WikiaQueryRequestMessage"/> instance with
/// the message ID and query fields.
/// </summary>
/// <param name="fieldCollection">
/// A dictionary or anonymous object containing the key-value pairs.
/// See <see cref="MediaWikiHelper.EnumValues"/> for more information.
/// For queries without query part, you can set this parameter to <c>null</c>.
/// </param>
/// <param name="httpPost">Whether to use HTTP POST method to issue the request.</param>
public WikiaQueryRequestMessage(string id, object fieldCollection, bool httpPost) : base(id)
{
    if (fieldCollection != null)
    {
        fields = MediaWikiHelper.EnumValues(fieldCollection).ToList();
    }
    else
    {
        // No query part at all.
        fields = null;
        readonlyFields = Array.Empty<KeyValuePair<string, object>>();
    }
    UseHttpPost = httpPost;
}
/// <inheritdoc />
public override IEnumerable<KeyValuePair<string, object>> EnumParameters(MediaWikiVersion version)
{
    var parameters = new OrderedKeyValuePairs<string, object>
    {
        { "clprop", "sortkey|timestamp|hidden" },
        { "clshow", HiddenCategoryFilter.ToString("hidden", "!hidden", null) }
    };
    // Restrict output to the selected categories when a selection is given.
    if (CategorySelection != null)
        parameters.Add("clcategories", MediaWikiHelper.JoinValues(CategorySelection));
    return parameters;
}
/// <inheritdoc/>
public override IEnumerable<KeyValuePair<string, object>> EnumListParameters()
{
    // cmtitle is mandatory for list=categorymembers.
    if (string.IsNullOrEmpty(CategoryTitle))
    {
        throw new InvalidOperationException(
            string.Format(Prompts.ExceptionArgumentIsEmpty1, nameof(CategoryTitle)));
    }
    return new Dictionary<string, object>
    {
        ["cmtitle"] = CategoryTitle,
        ["cmlimit"] = PaginationSize,
        ["cmnamespace"] = NamespaceIds == null ? null : MediaWikiHelper.JoinValues(NamespaceIds),
        ["cmtype"] = ParseMemberTypes(MemberTypes)
    };
}
/// <inheritdoc />
public override IEnumerable<KeyValuePair<string, object>> EnumParameters(MediaWikiVersion version)
{
    var parameters = new OrderedKeyValuePairs<string, object>
    {
        { "lhprop", "pageid|title|redirect" },
        { "lhshow", RedirectFilter.ToString("redirect", "!redirect", null) }
    };
    // Restrict output to the selected namespaces when a selection is given.
    if (NamespaceSelection != null)
        parameters.Add("lhnamespace", MediaWikiHelper.JoinValues(NamespaceSelection));
    return parameters;
}
/// <summary>
/// Refreshes the given Wikibase entities in place via <c>wbgetentities</c>,
/// grouping them by owner site and fetching in batches.
/// </summary>
/// <param name="entities">The entities to refresh; they may belong to different sites.</param>
/// <param name="options">Options controlling which parts of the entities to fetch.</param>
/// <param name="languages">Language codes to restrict the fetched labels/descriptions to; <c>null</c> or empty for all.</param>
/// <param name="cancellationToken">A token used to cancel the operation.</param>
/// <exception cref="ArgumentNullException"><paramref name="entities"/> is <c>null</c>.</exception>
public static async Task RefreshEntitiesAsync(IEnumerable<Entity> entities, EntityQueryOptions options, IEnumerable<string> languages, CancellationToken cancellationToken)
{
    if (entities == null) { throw new ArgumentNullException(nameof(entities)); }
    // Normalize an empty language list to null (i.e. no language restriction).
    var langs = languages == null ? null : MediaWikiHelper.JoinValues(languages);
    if (string.IsNullOrEmpty(langs)) { langs = null; }
    // You can even fetch pages from different sites.
    foreach (var siteEntities in entities.GroupBy(p => p.Site))
    {
        var site = siteEntities.Key;
        var req = BuildQueryOptions(langs, options);
        req["action"] = "wbgetentities";
        // apihighlimits accounts may batch up to 500 IDs per request; others 50.
        var titleLimit = site.AccountInfo.HasRight(UserRights.ApiHighLimits) ? 500 : 50;
        using (site.BeginActionScope(entities, options))
        {
            foreach (var partition in siteEntities.Partition(titleLimit).Select(partition => partition.ToList()))
            {
                //site.Logger.LogDebug("Fetching {Count} pages from {Site}.", partition.Count, site);
                // We use ids to query pages.
                req["ids"] = MediaWikiHelper.JoinValues(partition.Select(p => p.Id));
                var jresult = await site.InvokeMediaWikiApiAsync(new MediaWikiFormRequestMessage(req), cancellationToken);
                var jentities = (JObject)jresult["entities"];
                foreach (var entity in partition)
                {
                    var jentity = jentities[entity.Id];
                    // We can write Q123456 as q123456 in query params, but server will return Q123456 anyway.
                    if (jentity == null)
                    {
                        jentity = jentities.Properties().FirstOrDefault(p => string.Equals(p.Name, entity.Id, StringComparison.OrdinalIgnoreCase));
                    }
                    if (jentity == null)
                    {
                        throw new UnexpectedDataException($"Cannot find the entity with id {entity.Id} in the response.");
                    }
                    entity.LoadFromJson(jentity, options, false);
                }
            }
        }
    }
}
/// <summary>
/// Parses the continuation parameters from a query response, copies them into
/// <paramref name="continuationParams"/>, and reports whether continuing would
/// make progress.
/// </summary>
/// <param name="jresult">The JSON response to inspect for a continuation node.</param>
/// <param name="queryParams">The parameters of the request that produced <paramref name="jresult"/>, used to detect continuation loops.</param>
/// <param name="continuationParams">Receives the parsed continuation parameters; may be <c>null</c> if the caller does not need them.</param>
/// <returns><c>CONTINUATION_DONE</c>, <c>CONTINUATION_AVAILABLE</c>, or <c>CONTINUATION_LOOP</c> (all continuation values identical to the last request).</returns>
public static int ParseContinuationParameters(JToken jresult, IDictionary<string, object> queryParams, IDictionary<string, object> continuationParams)
{
    var continuation = FindQueryContinuationParameterRoot(jresult);
    // No more results.
    if (continuation == null || continuation.Count == 0)
    {
        return (CONTINUATION_DONE);
    }
    var anyNewValue = false;
    continuationParams?.Clear();
    foreach (var p in continuation.Properties())
    {
        object parsed;
        if (p.Value is JValue value)
        {
            parsed = value.Value;
        }
        else
        {
            // Non-scalar continuation values are carried as compact JSON text.
            parsed = p.Value.ToString(Formatting.None);
        }
        // A continuation value differing from the last request means continuing will make progress.
        if (!queryParams.TryGetValue(p.Name, out var existingValue) || !ValueEquals(existingValue, parsed))
        {
            anyNewValue = true;
        }
        continuationParams?.Add(new KeyValuePair<string, object>(p.Name, parsed));
    }
    return (anyNewValue ? CONTINUATION_AVAILABLE : CONTINUATION_LOOP);

    bool ValueEquals(object existing, object incoming)
    {
        if (Equals(existing, incoming))
        {
            return (true);
        }
        // A DateTime query value round-trips as its wiki string form; compare in UTC.
        if (existing is DateTime dt && incoming is string s)
        {
            if (MediaWikiHelper.TryParseDateTime(s, out var dt2))
            {
                // We have called ToUniversalTime() in ToWikiStringValuePairs.
                return (dt.ToUniversalTime() == dt2.ToUniversalTime());
            }
        }
        return (false);
    }
}
// Compares a previously-sent query value with an incoming continuation value.
// DateTime values may round-trip as wiki timestamp strings, so those are
// compared in UTC after parsing.
static bool ValueEquals(object existing, object incoming)
{
    if (Equals(existing, incoming))
        return true;
    if (existing is DateTime dt && incoming is string s
        && MediaWikiHelper.TryParseDateTime(s, out var parsedTime))
    {
        // We have called ToUniversalTime() in ToWikiStringValuePairs.
        return dt.ToUniversalTime() == parsedTime.ToUniversalTime();
    }
    return false;
}
// Builds a RevisionsPropertyGroup from a page JSON node:
// missing "revisions" node → null; empty node → the shared Empty instance.
internal static RevisionsPropertyGroup Create(JObject jpage)
{
    var jrevisions = jpage["revisions"];
    if (jrevisions == null)
        return null;
    if (!jrevisions.HasValues)
        return Empty;
    return new RevisionsPropertyGroup(MediaWikiHelper.PageStubFromJson(jpage), (JArray)jrevisions);
}
/// <summary>
/// Construct a sequence of <see cref="WikiPageStub"/> from the given page titles.
/// </summary>
/// <param name="site">The site in which to query for the pages.</param>
/// <param name="titles">The page titles to query.</param>
/// <exception cref="ArgumentNullException">Either <paramref name="site"/> or <paramref name="titles"/> is <c>null</c>.</exception>
/// <returns>A sequence of <see cref="WikiPageStub"/> containing the page information.</returns>
/// <remarks>For how the missing pages are handled, see the "remarks" section of <see cref="WikiPage"/>.</remarks>
public static IAsyncEnumerable<WikiPageStub> FromPageTitles(WikiSite site, IEnumerable<string> titles)
{
    if (site == null) { throw new ArgumentNullException(nameof(site)); }
    if (titles == null) { throw new ArgumentNullException(nameof(titles)); }
    return (AsyncEnumerableFactory.FromAsyncGenerator<WikiPageStub>(async (sink, ct) =>
    {
        // apihighlimits accounts may batch up to 500 titles per request; others 50.
        var titleLimit = site.AccountInfo.HasRight(UserRights.ApiHighLimits) ? 500 : 50;
        foreach (var partition in titles.Partition(titleLimit))
        {
            var jresult = await site.InvokeMediaWikiApiAsync(new MediaWikiFormRequestMessage(new
            {
                action = "query",
                titles = MediaWikiHelper.JoinValues(partition),
            }), ct);
            Debug.Assert(jresult["query"] != null);
            // Process title normalization.
            var normalizedDict = jresult["query"]["normalized"]?.ToDictionary(n => (string)n["from"], n => (string)n["to"]);
            // Index the response pages by their (normalized) titles.
            var pageDict = ((JObject)jresult["query"]["pages"]).Properties()
                .ToDictionary(p => (string)p.Value["title"], p => p.Value);
            foreach (var name in partition)
            {
                // Map each requested title to its normalized form, falling back to the title itself.
                if (normalizedDict == null || !normalizedDict.TryGetValue(name, out var normalizedName))
                {
                    normalizedName = name;
                }
                var jpage = pageDict[normalizedName];
                if (jpage["missing"] == null)
                {
                    sink.Yield(new WikiPageStub((int)jpage["pageid"], (string)jpage["title"], (int)jpage["ns"]));
                }
                else
                {
                    // Missing pages still carry a title/namespace, but no real page ID.
                    sink.Yield(new WikiPageStub(MissingPageIdMask, (string)jpage["title"], (int)jpage["ns"]));
                }
            }
            await sink.Wait();
        }
    }));
}
private GeoCoordinatesPropertyGroup(JArray jcoordinates)
{
    if (jcoordinates != null && jcoordinates.HasValues)
    {
        // The primary coordinate is the item flagged with "primary"; it is not
        // necessarily the first item of the array. (The previous code found
        // jprimary but then read jcoordinates.First, picking the wrong item
        // whenever the primary coordinate was not listed first.)
        var jprimary = jcoordinates.FirstOrDefault(c => c["primary"] != null);
        if (jprimary != null)
        {
            PrimaryCoordinate = MediaWikiHelper.GeoCoordinateFromJson((JObject)jprimary);
            PrimaryDistance = (int?)jprimary["dist"] ?? 0;
        }
        // Keep the full list when there is no primary, or when there are
        // additional coordinates besides the primary one.
        if (jprimary == null || jcoordinates.Count > 1)
        {
            var coordinates = jcoordinates
                .Select(c => MediaWikiHelper.GeoCoordinateFromJson((JObject)c))
                .ToArray();
            _Coordinates = new ReadOnlyCollection<GeoCoordinate>(coordinates);
        }
    }
}