/// <summary>
/// Resolves an <see cref="IImageIdentifier"/> for each of the given image titles by querying
/// the MediaWiki imageinfo API for extended metadata and URLs.
/// </summary>
/// <param name="imageTitles">Image titles as supplied by the caller (pre-normalization).</param>
/// <returns>A dictionary keyed by the caller-supplied title.</returns>
/// <exception cref="ApplicationException">
/// Thrown when the API returns no metadata or the response fails validation.
/// </exception>
public async Task<IReadOnlyDictionary<string, IImageIdentifier>> GetIdentifiersAsync(IEnumerable<string> imageTitles)
{
    var props = ImageInfoProps.ExtMetadata | ImageInfoProps.Url;
    var imageQueryResult = await QueryImageMetadataAsync(imageTitles, props)
        ?? throw new ApplicationException("Null image metadata object");

    // Validate response.
    if (!MediaWikiUtils.AssertImageInfoPropsNotNull(imageQueryResult, props))
    {
        var errorMessage = nameof(GetIdentifiersAsync) + " cannot retrieve object image info, invalid image query result.";
        _logger.LogError(errorMessage);
        throw new ApplicationException(errorMessage);
    }

    // Response is valid here. The metadata object is present.
    // Attributes have non-empty keys and non-null values.
    // Map normalized title -> original title so results can be keyed by the caller's spelling.
    var originalTitles = new Dictionary<string, string>();
    if (imageQueryResult.query.normalized != null)
    {
        originalTitles = imageQueryResult.query.normalized.ToDictionary(x => x.to, x => x.from);
    }

    // Materialize once: the sequence is re-scanned by Single() for every page below.
    var titleList = imageTitles.ToList();

    Dictionary<string, IImageIdentifier> imageIdentifiers = new();
    foreach (var page in imageQueryResult.query.pages)
    {
        var imageInfo = page.Value?.imageinfo?.SingleOrDefault()
            ?? throw new ApplicationException("Null image info object");

        // Null safety of the following dereferences has been asserted by AssertImageInfoPropsNotNull
        var metaAttributes = imageInfo.extmetadata;
        var url = imageInfo.url;
        var descriptionUrl = imageInfo.descriptionurl;
        var normalizedTitle = page.Value.title;

        // BUG FIX: the original indexed originalTitles[normalizedTitle] unconditionally inside
        // the predicate, which throws KeyNotFoundException for any title the API did not
        // normalize (and always when the 'normalized' array was absent). TryGetValue leaves
        // originalTitle null in that case, so only the exact-title comparison can match.
        originalTitles.TryGetValue(normalizedTitle, out var originalTitle);
        var matchingTitle = titleList.Single(x => x == normalizedTitle || x == originalTitle);

        var attributesDictionary = metaAttributes.ToDictionary(x => x.Key, x => x.Value.value);
        var imageIdentifier = _imageIdentifierFactory.CreateIdentifier(matchingTitle, descriptionUrl, url, attributesDictionary);
        imageIdentifiers.Add(matchingTitle, imageIdentifier);
    }

    return imageIdentifiers;
}
/// <summary>
/// Builds a MediaWiki imageinfo query for the given titles, downloads the JSON response,
/// and deserializes it into an <see cref="ImageInfoRootObject"/>.
/// </summary>
/// <param name="imageTitles">Titles of the images to query metadata for.</param>
/// <param name="iiProps">Which imageinfo properties to request.</param>
/// <returns>The deserialized response root, or null if deserialization yields nothing.</returns>
private async Task<ImageInfoRootObject?> QueryImageMetadataAsync(IEnumerable<string> imageTitles, ImageInfoProps iiProps)
{
    // Compose the request URI from the titles and the requested property flags.
    var metadataQuery = MediaWikiUtils.GetImageMetadataQuery(imageTitles, iiProps);
    var requestUri = new Uri(metadataQuery);
    _logger.LogInformation("Parse Query: " + metadataQuery);

    // Query the API and parse the payload.
    var responseContent = await _networkingProvider.DownloadContentAsync(requestUri);
    _logger.LogInformation("Parse Query response content: " + responseContent);

    return JsonConvert.DeserializeObject<ImageInfoRootObject>(responseContent);
}
/// <summary>
/// Downloads and parses a wiki article in the requested content model, pairing it with its
/// license (resolved concurrently on a background task).
/// </summary>
/// <param name="articleIdentifier">Identifies the article (title and language).</param>
/// <param name="contentModel">Whether to fetch rendered HTML or raw wikitext.</param>
/// <returns>The assembled article.</returns>
/// <exception cref="ApplicationException">Thrown when the parser output cannot be deserialized.</exception>
/// <exception cref="NotImplementedException">Thrown for an unsupported content model.</exception>
public async Task<IWikiArticle> GetArticleAsync(IArticleIdentifier articleIdentifier, ContentModel contentModel)
{
    try
    {
        // Start license resolution now; it is awaited only after the content has been parsed.
        var licenseTask = _articleLicenseProvider.GetArticleLicenseAsync(articleIdentifier);

        var parseQueryUri = new Uri(MediaWikiUtils.GetParseQuery(articleIdentifier.Title, articleIdentifier.Language, contentModel));
        var mwResponse = await _networkingProvider.DownloadContentAsync(parseQueryUri);
        var mwResponseObject = JsonConvert.DeserializeObject<ParserResponse.ParserRootObject>(mwResponse);

        // Pick the payload field matching the requested content model.
        string? content;
        switch (contentModel)
        {
            case ContentModel.Html:
                content = mwResponseObject?.parse?.text?["*"];
                break;
            case ContentModel.WikiText:
                content = mwResponseObject?.parse?.wikitext?["*"];
                break;
            default:
                throw new NotImplementedException();
        }

        if (content == null)
        {
            var logSb = new StringBuilder()
                .Append("Failed to deserialize MediaWiki parser output.")
                .Append(Environment.NewLine)
                .Append("Media Wiki response content: ")
                .Append(mwResponse);
            throw new ApplicationException(logSb.ToString());
        }

        // Retrieve the license resolved by the background task.
        var license = await licenseTask;
        return _wikiMediaFactory.CreateWikiArticle(articleIdentifier, license, content, contentModel);
    }
    catch (Exception e)
    {
        _logger.LogError(e, "Failed to retrieve article.");
        throw;
    }
}
// Unity custom-inspector callback for WikiData: draws save/load, parse, and search controls.
// NOTE(review): `async void` is unavoidable here because the base OnInspectorGUI signature
// returns void; exceptions thrown after the first await are unobservable — confirm the
// MediaWikiUtils parse helpers report failures via `scriptableData.progress`.
public override async void OnInspectorGUI()
{
    scriptableData = (WikiData)target;
    DrawDefaultInspector();

    // ---- save
    GUILayout.Label("Words count: " + scriptableData.Dict.Count);
    GUILayout.Label("Json File Path");
    scriptableData.savePath = GUILayout.TextField(scriptableData.savePath);
    if (GUILayout.Button("Save words(json)"))
    {
        SerrializeDict(scriptableData.savePath);
    }
    if (GUILayout.Button("Load words(json)"))
    {
        DeserrializeDict(scriptableData.savePath);
        scriptableData.searchResults = Array.Empty<string>();
        scriptableData.selectedIndex = 0;
    }
    GUILayout.Label("Words File Path");
    scriptableData.savePathPlain = GUILayout.TextField(scriptableData.savePathPlain);
    if (GUILayout.Button("Save words(txt)"))
    {
        SaveWordsAsPlainText(scriptableData.savePathPlain);
    }

    // ---- parse
    GUILayout.Label(scriptableData.progress);
    GUILayout.Label("Url");
    scriptableData.categoryUrl = GUILayout.TextField(scriptableData.categoryUrl);
    bool parse = GUILayout.Button("Parse");
    if (GUILayout.Button("Stop"))
    {
        MediaWikiUtils.run = false;
    }

    // ---- search
    GUILayout.Label("Search");
    scriptableData.searchStartWith = GUILayout.Toggle(scriptableData.searchStartWith, "Start With");
    scriptableData.searchWord = GUILayout.TextField(scriptableData.searchWord);
    if (GUILayout.Button("Search"))
    {
        if (scriptableData.searchStartWith)
        {
            // Prefix search: collect every key that starts with the query.
            scriptableData.searchResults = scriptableData.Dict.Keys
                .Where(k => k.StartsWith(scriptableData.searchWord))
                .ToArray();
            Debug.Log(scriptableData.searchResults.Length);
        }
        else
        {
            // Exact lookup. FIX: single TryGetValue instead of ContainsKey + indexer.
            scriptableData.searchResults = Array.Empty<string>();
            scriptableData.selectedIndex = 0;
            scriptableData.searchResult = scriptableData.Dict.TryGetValue(scriptableData.searchWord, out var data)
                ? WordFormat(data)
                : "Not found";
        }
    }

    GUILayout.TextArea(scriptableData.searchResult, GUILayout.Height(230));
    scriptableData.selectedIndex = GUILayout.SelectionGrid(scriptableData.selectedIndex, scriptableData.searchResults, 1);

    // BUG FIX: the original indexed searchResults with a possibly stale selectedIndex and
    // swallowed the resulting out-of-range exception in a blanket catch. Check the bounds
    // explicitly and reset the selection instead of using exceptions for control flow.
    if (scriptableData.searchResults.Length > 0)
    {
        if (scriptableData.selectedIndex < 0 || scriptableData.selectedIndex >= scriptableData.searchResults.Length)
        {
            Debug.Log(scriptableData.selectedIndex);
            scriptableData.selectedIndex = 0;
        }
        string word = scriptableData.searchResults[scriptableData.selectedIndex];
        if (scriptableData.Dict.TryGetValue(word, out var selectedData))
        {
            scriptableData.searchResult = WordFormat(selectedData);
        }
    }

    // ---- async
    if (parse)
    {
        // `run` doubles as a re-entrancy guard: ignore Parse clicks while a parse is in flight.
        if (MediaWikiUtils.run)
        {
            return;
        }
        MediaWikiUtils.run = true;

        // Category name is the URL segment after the last ':' (e.g. ".../Category:Animals").
        string url = HttpUtility.UrlDecode(scriptableData.categoryUrl);
        string category = url.Substring(url.LastIndexOf(":") + 1);
        Debug.Log(category);

        var pages = await Task.Run(() => MediaWikiUtils.ParseCategory(category, ref scriptableData.progress, scriptableData.WordFilter));
        var dict = await Task.Run(() => MediaWikiUtils.ParsePages(pages, ref scriptableData.progress, scriptableData.PageFilter));

        // Merge parsed entries without overwriting existing ones.
        foreach (var pair in dict)
        {
            if (!scriptableData.Dict.ContainsKey(pair.Key))
            {
                scriptableData.Dict.Add(pair.Key, pair.Value);
            }
        }
    }
}