public void MergesAllFullComments()
{
    // Merging a sparse page with its full counterpart must keep the very same
    // comment objects, all of them full and carrying non-empty text.
    EntryPage target = TestingShared.GenerateEntryPage();
    EntryPage source = TestingShared.GenerateEntryPage(true);

    Comment[] before = _rh.EnumerateAll(target.Replies).ToArray();
    _rh.MergeFrom(target, source);
    Comment[] after = _rh.EnumerateAll(target.Replies).ToArray();

    // No objects replaced.
    for (int index = 0; index < before.Length; index++)
    {
        Assert.AreSame(before[index], after[index]);
        Assert.IsTrue(after[index].IsFull);
        Assert.IsTrue(!string.IsNullOrWhiteSpace(after[index].Text));
    }
}
public void TopAuthorCommentIsAdded()
{
    // A root comment with one reply: picking must yield exactly one thread
    // containing only the root comment.
    EntryPage ep = new EntryPage();
    ep.Entry.Poster.Username = "******";

    Comment root = new Comment();
    TestingShared.SetIdAndUrls(root, 1, null);
    root.Poster.Username = "******";
    ep.Replies.Comments.Add(root);

    Comment reply = new Comment();
    TestingShared.SetIdAndUrls(reply, 2, root);
    reply.Poster.Username = "******";
    root.Replies.Comments.Add(reply);

    List<Comment[]> result = _picker.Pick(ep);

    Assert.AreEqual(1, result.Count);
    Assert.AreEqual(1, result[0].Length);
    Assert.AreSame(root, result[0][0]);
}
public EntryPage WorkInGivenTarget(string URI, string rootLocation, string innerFolder, string cookie)
{
    // Delegate to Work() using a naming strategy that passes files through
    // into the given inner folder.
    SubfolderPassthroughNamingStrategy strategy =
        new SubfolderPassthroughNamingStrategy(innerFolder);
    return Work(URI, rootLocation, strategy, cookie);
}
/// <summary>Generates a page that has a simple tree of comments.</summary>
/// <param name="makeAllFull">When true, every comment is marked full and gets text;
/// otherwise only the second-level comment is full.</param>
/// <param name="shiftNumbers">Offset added to every generated comment id.</param>
/// <returns>An entry page with one root comment, two children and one grandchild.</returns>
public static EntryPage GenerateEntryPage(bool makeAllFull = false, int shiftNumbers = 0)
{
    EntryPage p = new EntryPage();
    p.Entry = new Entry() { Text = "Text", Subject = "Subject", Id = 1, Poster = new UserLite() { Username = "******" }, Date = new DateTime(2015, 1, 1) };
    // Two comment pages; both navigation urls point at the same test target.
    p.CommentPages = new CommentPages() { Current = 1, Total = 2, NextUrl = new LiveJournalTarget("galkovsky", 1).ToString(), LastUrl = new LiveJournalTarget("galkovsky", 1).ToString(), };

    // Root comment (id 11 + shift).
    Comment a = new Comment() { IsFull = makeAllFull, Poster = new UserLite() { Username = "******" }, Text = makeAllFull ? "1" : String.Empty, };
    SetIdAndUrls(a, 11 + shiftNumbers, null);
    {
        // First child (id 12 + shift) — always full.
        Comment a_b = new Comment() { IsFull = true, Poster = new UserLite() { Username = "******" }, Text = "2", };
        a.Replies.Comments.Add(a_b);
        SetIdAndUrls(a_b, 12 + shiftNumbers, a);
        {
            // Grandchild (id 13 + shift).
            Comment a_b_c = new Comment() { IsFull = makeAllFull, Poster = new UserLite() { Username = "******" }, Text = makeAllFull ? "3" : String.Empty, };
            a_b.Replies.Comments.Add(a_b_c);
            SetIdAndUrls(a_b_c, 13 + shiftNumbers, a_b);
        }
        // Second child (id 14 + shift).
        Comment a_d = new Comment() { IsFull = makeAllFull, Poster = new UserLite() { Username = "******" }, Text = makeAllFull ? "4" : String.Empty, };
        a.Replies.Comments.Add(a_d);
        SetIdAndUrls(a_d, 14 + shiftNumbers, a);
    }
    p.Replies.Comments.Add(a);
    return p;
}
/// <summary>
/// Adds everything possible to target from another source.
/// This may include new comments, full comments, larger text etc.
/// </summary>
/// <param name="target">Object to add data to.</param>
/// <param name="otherSource">Object to use data from.</param>
/// <returns>True when anything in the target was updated.</returns>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
public bool AddData(EntryPage target, EntryPage otherSource)
{
    bool updated = false;

    // Check arguments separately so the exception names the missing one
    // (the original blank ArgumentNullException gave no hint).
    if (target == null)
    {
        throw new ArgumentNullException("target");
    }
    if (otherSource == null)
    {
        throw new ArgumentNullException("otherSource");
    }

    // Entry.
    log.DebugFormat("Updating content for entry page '{0}' from other source.", target);
    updated |= _entryHelper.UpdateWith(target.Entry, otherSource.Entry);

    // Comments.
    log.DebugFormat("Updating comments for entry page '{0}' from other source.", target);
    updated |= _repliesHelper.MergeFrom(target, otherSource);

    // CommentPages: a merged page must not keep navigation data.
    if (target.CommentPages != null && !CommentPages.IsEmpty(target.CommentPages))
    {
        log.DebugFormat("Cleaning comment pages information for entry page '{0}'.", target);
        target.CommentPages = CommentPages.Empty;
        updated = true;
    }

    return updated;
}
public void ParsesTestPage()
{
    // Parses a captured LiveJournal page and spot-checks the deserialized tree.
    string content = TestingShared.GetFileContent("testpage_247911.xml");
    LayerParser p = new LayerParser();
    EntryPage page = p.ParseAsAnEntryPage(content);
    Assert.IsNotNull(page);
    Assert.IsNotNull(page.Replies);

    // Has userpic.
    Userpic userpic = page.Entry.PosterUserpic;
    Assert.IsNotNull(userpic);

    // Comment deserialization: id, full flag, date, text and nested url.
    Comment firstComment = page.Replies.Comments[0];
    Assert.AreEqual(91506535, firstComment.Id);
    Assert.IsTrue(firstComment.IsFull, "By default a comment is treated as full.");
    Assert.AreEqual(new DateTime(2015, 6, 25, 15, 16, 50), firstComment.Date.Value);
    Assert.AreEqual("1", firstComment.Text);
    Comment innerComment = firstComment.Replies.Comments[0];
    Assert.AreEqual("http://galkovsky.livejournal.com/247911.html?thread=91589479#t91589479", innerComment.Url);

    // Entry text deserialization.
    string entryText = page.Entry.Text;
    Assert.IsTrue(entryText.StartsWith("<p><center><img"));
}
public void DownloadsCommentPagesCorrectly()
{
    // The source entry page sits on page 4 of 10; the loader must fetch the
    // other nine pages and return them in page order.
    int source = 4;
    int total = 10;

    ILJClient clientMock = MockRepository.GenerateMock<ILJClient>();
    ILayerParser parserMock = MockRepository.GenerateMock<ILayerParser>();

    // Creates a comment pages object by page index.
    Func<int, CommentPages> createCPByPage = cpi =>
    {
        CommentPages c = new CommentPages();
        c.Current = cpi;
        c.Total = total;
        // First/Prev urls only exist when not on the first page.
        if (c.Current != 1)
        {
            c.FirstUrl = new LiveJournalTarget("galkovsky", 1, page: 1).ToString();
            c.PrevUrl = new LiveJournalTarget("galkovsky", 1, page: c.Current - 1).ToString();
        }
        // Last/Next urls only exist when not on the last page.
        if (c.Current != total)
        {
            c.LastUrl = new LiveJournalTarget("galkovsky", 1, page: total).ToString();
            c.NextUrl = new LiveJournalTarget("galkovsky", 1, page: c.Current + 1).ToString();
        }
        return c;
    };

    // The client "downloads" a page by echoing back the requested page number.
    clientMock.Expect(z => z.GetContent(Arg<LiveJournalTarget>.Is.NotNull, Arg<ILJClientData>.Is.Null))
        .Return(null)
        .WhenCalled(_ =>
        {
            LiveJournalTarget t = (LiveJournalTarget)_.Arguments[0];
            int page = t.Page.Value;
            _.ReturnValue = page.ToString();
        });

    // The parser turns that page number back into an entry page whose
    // CommentPages metadata matches the requested page.
    parserMock.Expect(z => z.ParseAsAnEntryPage(Arg<string>.Is.Anything))
        .Return(null)
        .WhenCalled(_ =>
        {
            string req = (string)_.Arguments[0];
            EntryPage ep = new EntryPage();
            ep.CommentPages = createCPByPage(int.Parse(req));
            _.ReturnValue = ep;
        });

    OtherPagesLoader opl = new OtherPagesLoader(parserMock, clientMock);

    // This is the source object we get from an entry page.
    CommentPages cp = createCPByPage(source);
    EntryPage[] others = opl.LoadOtherCommentPages(cp, null);

    // Every page except the source one must be returned, in order.
    Assert.AreEqual(total - 1, others.Length);
    IEnumerable<int> numbersWeExpect = Enumerable.Range(1, total).Where(z => z != source);
    IEnumerable<int> numbersWeHave = others.Select(z => z.CommentPages.Current);
    CollectionAssert.AreEqual(numbersWeExpect, numbersWeHave);
}
public void EnsuresTargetCommentsPagesDataIsNull(bool startFromEmpty)
{
    // Whether merging into a fresh page or a populated one, the comment pages
    // navigation data must be empty after AddData.
    EntryPage target;
    if (startFromEmpty)
    {
        target = new EntryPage();
    }
    else
    {
        target = TestingShared.GenerateEntryPage();
    }
    EntryPage source = TestingShared.GenerateEntryPage(true);

    _eph.AddData(target, source);

    Assert.IsTrue(CommentPages.IsEmpty(target.CommentPages));
}
/// <summary>Gets a page from a Url.</summary>
/// <param name="url">Target to download.</param>
/// <param name="clientData">Client data passed through to the download.</param>
public EntryPage GetFrom(LiveJournalTarget url, ILJClientData clientData)
{
    // Download the raw content, then let the parser build the entry page.
    string content = Client.GetContent(url, clientData);
    return _parser.ParseAsAnEntryPage(content);
}
public void SelectsCommentsAsExpected()
{
    // From the generated page, expect a single picked thread of three comments.
    EntryPage ep = TestingShared.GenerateEntryPage(true);

    List<Comment[]> picked = _picker.Pick(ep);

    Assert.AreEqual(1, picked.Count);
    Assert.AreEqual(3, picked[0].Length);
}
public void StoresUserpic()
{
    // After clearing the comments and serializing, the userpic element must
    // still be present in the output.
    string content = TestingShared.GetFileContent("testpage_247911.xml");
    LayerParser parser = new LayerParser();

    EntryPage page = parser.ParseAsAnEntryPage(content);
    page.Replies.Comments.Clear();

    string serialized = parser.Serialize(page);

    Assert.IsTrue(serialized.Contains("<userpic "));
}
public void CallMergeFunctions()
{
    // AddData must delegate to both the entry helper and the replies helper.
    EntryPage target = new EntryPage();
    EntryPage source = new EntryPage();

    _entryHelper.Expect(z => z.UpdateWith(target.Entry, source.Entry)).Return(true);
    _repliesHelper.Expect(z => z.MergeFrom(target, source)).Return(true);

    _eph.AddData(target, source);

    _entryHelper.VerifyAllExpectations();
    _repliesHelper.VerifyAllExpectations();
}
public void CalssInit()
{
    // NOTE(review): name looks like a typo for "ClassInit" — kept as-is to
    // avoid breaking test-framework wiring.
    ChromeOptions options = new ChromeOptions();
    options.PlatformName = "windows";
    options.BrowserVersion = "77.0";

    // Remote Selenium hub; the same 50-second timeout is used for commands
    // and for page loads.
    TimeSpan timeout = TimeSpan.FromSeconds(50);
    _driver = new RemoteWebDriver(
        new Uri("http://192.168.1.101:1259/wd/hub"),
        options.ToCapabilities(),
        timeout);
    _driver.Manage().Timeouts().PageLoad = timeout;

    _EntryPage = new EntryPage(_driver);
    _Filter = new Filter(_driver);
}
public void SwapPage()
{
    // Toggle between the main page and the entry page.
    bool showEntry = MainPage.activeSelf;
    MainPage.SetActive(!showEntry);
    EntryPage.SetActive(showEntry);
    SetEntryPageGame(showEntry ? 4 : 1);
}
public void EmptyPageGetsAllDataWhenMergedInto()
{
    // Updating a blank entry from a fully populated one (with matching ids)
    // must copy over text, subject and date.
    Entry blank = new Entry();
    EntryPage populated = TestingShared.GenerateEntryPage();
    blank.Id = 1;
    populated.Entry.Id = 1;

    _eh.UpdateWith(blank, populated.Entry);

    Assert.That(blank.Text, Is.Not.Null.And.Not.Empty);
    Assert.That(blank.Subject, Is.Not.Null.And.Not.Empty);
    Assert.IsNotNull(blank.Date);
    Assert.AreNotEqual(default(long), blank.Id);
}
public void CallMergeFunctions()
{
    EntryPage target = new EntryPage();
    EntryPage source = new EntryPage();

    // Record the expected delegations before the call.
    _entryHelper
        .Expect(z => z.UpdateWith(target.Entry, source.Entry))
        .Return(true);
    _repliesHelper
        .Expect(z => z.MergeFrom(target, source))
        .Return(true);

    _eph.AddData(target, source);

    // Both helpers must have been invoked.
    _entryHelper.VerifyAllExpectations();
    _repliesHelper.VerifyAllExpectations();
}
public void StoresUsername()
{
    // Round-trip: parse, strip comments, serialize, parse again — the poster's
    // username must survive.
    string content = TestingShared.GetFileContent("testpage_247911.xml");
    LayerParser parser = new LayerParser();

    EntryPage page = parser.ParseAsAnEntryPage(content);
    page.Replies.Comments.Clear();

    string roundTripped = parser.Serialize(page);
    page = parser.ParseAsAnEntryPage(roundTripped);

    Assert.That(page.Entry.Poster.Username, Is.Not.Null.And.Not.Empty);
}
/// <summary>
/// Watchdog: verifies that every comment written by the entry's poster made it
/// into the picked threads; throws when any were left behind.
/// </summary>
/// <param name="ep">The entry page whose full comment tree is inspected.</param>
/// <param name="ret">The picked threads to validate.</param>
/// <exception cref="InvalidOperationException">Author comments were left behind.</exception>
private void AssertAuthorCommentsArePicked(EntryPage ep, List<Comment[]> ret)
{
    string authorUsername = ep.Entry.Poster.Username;

    // All author comments present in the tree.
    // (Renamed from "authorCommentsCount" — it holds comments, not a count.)
    Comment[] allAuthorComments = _repliesHelper.EnumerateAll(ep.Replies)
        .Where(z => z.Poster.Username == authorUsername)
        .ToArray();

    // Author comments that actually made it into the picked threads.
    Comment[] pickedAuthorComments = ret.SelectMany(z => z)
        .Where(z => z.Poster.Username == authorUsername)
        .ToArray();

    Comment[] authorCommentsLeftBehind = allAuthorComments.Except(pickedAuthorComments).ToArray();
    if (authorCommentsLeftBehind.Length != 0)
    {
        // Watchdog barks.
        string message = String.Format(
            "Author comments with ids {0} were left behind when picking.",
            String.Join(", ", authorCommentsLeftBehind.Select(z => z.Id)));
        log.Error(message);
        // InvalidOperationException instead of the discouraged ApplicationException.
        throw new InvalidOperationException(message);
    }
}
public void InsertCommentsFromMiddlePages()
{
    // Merge pages out of order (last page, then middle page) into the first;
    // the resulting ids must be unique and sorted.
    EntryPage first = TestingShared.GenerateEntryPage(true, 0);
    EntryPage middle = TestingShared.GenerateEntryPage(true, 50);
    EntryPage last = TestingShared.GenerateEntryPage(true, 100);

    _rh.MergeFrom(first, last);
    _rh.MergeFrom(first, middle);

    Comment[] merged = _rh.EnumerateAll(first.Replies).ToArray();

    Assert.AreEqual(12, merged.Length, "Comments should've been added.");
    CollectionAssert.AllItemsAreUnique(merged.Select(z => z.Id));
    CollectionAssert.IsOrdered(merged.Select(z => z.Id));
}
public void RemovesDuplicateUserpicsProperly()
{
    // Partial mock: CreateUserpicTuple always yields the same userpic tuple,
    // so GetUserpics over two entries must deduplicate down to one.
    Userpic a = new Userpic() { Url = "ABC" };
    Tuple<string, Userpic> ta = Tuple.Create("User", a);

    // (Removed an unused "EntryPage source" local that the original declared.)
    EntryBaseHelper eh = MockRepository.GeneratePartialMock<EntryBaseHelper>(_fileUrlExtractor);
    eh.Expect(z => z.CreateUserpicTuple(Arg<EntryBase>.Is.Anything)).Return(ta);

    // Should return only single userpic as their URL are the same.
    Tuple<string, Userpic>[] result = eh.GetUserpics(new EntryBase[] { null, null });
    Assert.AreEqual(1, result.Length);

    eh.VerifyAllExpectations();
}
public void LoginSuccess1(Dictionary<string, string> data)
{
    // Happy path: open the entry page, sign in with stored credentials,
    // land on the home page. (Title verification steps are currently disabled.)
    EntryPage entryPage = new EntryPage(driver, data);
    LoginPage loginPage = new LoginPage(driver, data);
    HomePage homePage = new HomePage(driver, data);

    entryPage.GoToEntryPage();
    loginPage = entryPage.ClickOnSignIn();
    loginPage.InputUsername();
    loginPage.InputPassword();
    homePage = loginPage.ClickEnter();
}
/// <summary>Picks the author's comment threads from an entry page.</summary>
public List<Comment[]> Pick(EntryPage ep)
{
    string author = ep.Entry.Poster.Username;

    // Extract threads from every root comment, in parallel but keeping order.
    List<Comment[]> threads = ep.Replies.Comments
        .AsParallel().AsOrdered()
        .SelectMany(rootComment => ExtractThreads(rootComment, author))
        .ToList();

    // Make sure all author comments were picked.
    AssertAuthorCommentsArePicked(ep, threads);

    return threads;
}
public void CommentsAreSerializedOnlyWhenAreNotEmpty()
{
    // Does a ShouldSerializeXxx method exist for the CommentPages property?
    Type t = typeof(EntryPage);
    string propertyName = t.GetProperties()
        .Single(z => z.PropertyType == typeof(CommentPages)).Name;
    bool methodExists = t.GetMethods().Any(z => z.Name == "ShouldSerialize" + propertyName);

    // Fail the test properly instead of throwing a bare System.Exception.
    Assert.IsTrue(methodExists, "No serialization method.");

    // Non-empty comment pages must be serialized.
    EntryPage ep = new EntryPage();
    ep.CommentPages.Total = 10;
    Assert.IsTrue(ep.ShouldSerializeCommentPages());

    // Empty ones must not.
    ep = new EntryPage();
    Assert.IsFalse(ep.ShouldSerializeCommentPages());
}
public void TopAuthorCommentIsAdded()
{
    EntryPage ep = new EntryPage();
    ep.Entry.Poster.Username = "******";

    // Root comment.
    Comment topComment = new Comment();
    TestingShared.SetIdAndUrls(topComment, 1, null);
    topComment.Poster.Username = "******";
    ep.Replies.Comments.Add(topComment);

    // A reply underneath it.
    Comment childComment = new Comment();
    TestingShared.SetIdAndUrls(childComment, 2, topComment);
    childComment.Poster.Username = "******";
    topComment.Replies.Comments.Add(childComment);

    List<Comment[]> result = _picker.Pick(ep);

    // Exactly one thread, containing just the top comment.
    Assert.AreEqual(1, result.Count);
    Assert.AreEqual(1, result[0].Length);
    Assert.AreSame(topComment, result[0][0]);
}
/// <summary>
/// Chooses the app's main page from the stored access token: a token that is
/// present (renewed via login when expired) leads to the menu page, otherwise
/// the entry page is shown.
/// </summary>
private void SetMainPage()
{
    // (Removed large blocks of commented-out dead code: LoadSomeData and the
    // old navigation-page variants.)
    if (!string.IsNullOrEmpty(Settings.AccessToken))
    {
        if (AzureCloudService.IsTokenExpired(Settings.AccessToken))
        {
            // Token expired: trigger a re-login before showing the menu.
            var vm = new EntryPageViewModel();
            vm.LoginCommand.Execute(null);
        }

        MainPage = new MenuPage();
    }
    else
    {
        MainPage = new EntryPage();
    }
}
/// <summary>Serializes an entry page to an XML string (UTF-8, no BOM, no XML declaration).</summary>
/// <param name="ep">The page to serialize.</param>
/// <returns>The indented XML text.</returns>
public string Serialize(EntryPage ep)
{
    UTF8Encoding enc = new UTF8Encoding(false);

    XmlWriterSettings settings = new XmlWriterSettings();
    settings.OmitXmlDeclaration = true;
    settings.Indent = true;
    settings.ConformanceLevel = ConformanceLevel.Auto;
    settings.Encoding = enc;

    // Suppress the default xsi/xsd namespace declarations.
    XmlSerializerNamespaces names = new XmlSerializerNamespaces();
    names.Add("", "");

    // The original leaked both the stream and the writer; dispose the writer
    // BEFORE reading the stream so all buffered output is flushed.
    using (MemoryStream ms = new MemoryStream())
    {
        using (XmlWriter writer = XmlWriter.Create(ms, settings))
        {
            XmlSerializer sr = new XmlSerializer(typeof(EntryPage));
            sr.Serialize(writer, ep, names);
        }

        return enc.GetString(ms.ToArray());
    }
}
/// <summary>
/// Merges the comment tree from <paramref name="fullVersion"/> into
/// <paramref name="target"/>. Supports Comment and EntryPage arguments.
/// </summary>
/// <param name="target">Object to merge data into.</param>
/// <param name="fullVersion">Object holding the fuller data.</param>
/// <returns>True when anything in the target was changed.</returns>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
/// <exception cref="ArgumentException">Two comments have different ids.</exception>
/// <exception cref="NotSupportedException">T is neither Comment nor EntryPage.</exception>
public bool MergeFrom<T>(T target, T fullVersion) where T : IHasReplies
{
    log.DebugFormat("Will merge comment tree from '{0}' with '{1}'.", target, fullVersion);

    // Argument check.
    if (fullVersion == null || target == null)
    {
        throw new ArgumentNullException();
    }

    if (typeof(T) == typeof(Comment))
    {
        // Items are comments.
        Comment a = target as Comment;
        Comment b = fullVersion as Comment;
        if (a.Id != b.Id)
        {
            string message = "Comments have different ids.";
            log.Error(message);
            // BUGFIX: an id mismatch is an invalid argument, not a null
            // argument — was ArgumentNullException.
            throw new ArgumentException(message);
        }
        return MergeCommentDataInternal(a, b);
    }

    if (typeof(T) == typeof(EntryPage))
    {
        EntryPage ea = target as EntryPage;
        EntryPage eb = fullVersion as EntryPage;
        return MergeRepliesInternal(ea.Replies, eb.Replies);
    }

    throw new NotSupportedException();
}
/// <summary>
/// Absorbs everything from a freshly downloaded page into dumpData: entry data,
/// comments from the other comment pages, and full versions of folded comments.
/// </summary>
/// <param name="freshSource">Freshly downloaded entry page.</param>
/// <param name="clientData">Client data passed through to downloads.</param>
/// <param name="dumpData">Accumulated page; created when null.</param>
/// <returns>True when anything was added or changed.</returns>
public bool AbsorbAllData(EntryPage freshSource, ILJClientData clientData, ref EntryPage dumpData)
{
    bool appliedAnything = false;
    if (dumpData == null)
    {
        dumpData = new EntryPage();
        appliedAnything = true;
    }

    appliedAnything |= _entryPageHelper.AddData(dumpData, freshSource);

    // TryGet all comments from the other comment pages.
    EntryPage[] otherPages = _otherPagesLoader.LoadOtherCommentPages(freshSource.CommentPages, clientData);
    foreach (EntryPage pageX in otherPages)
    {
        appliedAnything |= _entryPageHelper.AddData(dumpData, pageX);
    }

    // Unfold comments one at a time until none require a full version.
    while (true)
    {
        // Materialize the enumeration: the original hand-rolled MoveNext loop
        // never disposed the enumerator, and we need both the count and the
        // first item anyway.
        List<Comment> foldedComments = new List<Comment>(
            _repliesHelper.EnumerateRequiringFullUp(dumpData.Replies));

        // How many comments left?
        log.Info(string.Format("Folded comments left: {0}.", foldedComments.Count));
        if (foldedComments.Count == 0)
        {
            break;
        }

        Comment c = foldedComments[0];
        LiveJournalTarget commentTarget = LiveJournalTarget.FromString(c.Url);
        EntryPage commentPage = GetFrom(commentTarget, clientData);
        Comment fullVersion = commentPage.Replies.Comments[0];
        if (fullVersion.IsFull == false)
        {
            // This should be a suspended user; mark it so the enumeration
            // does not return it again.
            log.Info(string.Format("Comment {0} seems to be from a suspended user.", c));
            c.IsSuspendedUser = true;
            continue;
        }

        log.Info(string.Format("Merging comment data for comment {0}.", c));
        appliedAnything |= _repliesHelper.MergeFrom(c, fullVersion);
    }

    return appliedAnything;
}
/// <summary>Generates a page that has a simple tree of comments.</summary>
/// <param name="makeAllFull">When true, every comment is marked full and gets text;
/// otherwise only the second-level comment is full.</param>
/// <param name="shiftNumbers">Offset added to every generated comment id.</param>
/// <returns>An entry page with one root comment, two children and one grandchild.</returns>
public static EntryPage GenerateEntryPage(bool makeAllFull = false, int shiftNumbers = 0)
{
    EntryPage p = new EntryPage();
    p.Entry = new Entry
    {
        Text = "Text",
        Subject = "Subject",
        Id = 1,
        Poster = new UserLite { Username = "******" },
        Date = new DateTime(2015, 1, 1)
    };
    // Two comment pages; both navigation urls point at the same test target.
    p.CommentPages = new CommentPages
    {
        Current = 1,
        Total = 2,
        NextUrl = new LiveJournalTarget("galkovsky", 1).ToString(),
        LastUrl = new LiveJournalTarget("galkovsky", 1).ToString()
    };

    // Root comment (id 11 + shift).
    Comment a = new Comment
    {
        IsFull = makeAllFull,
        Poster = new UserLite { Username = "******" },
        Text = makeAllFull ? "1" : string.Empty
    };
    SetIdAndUrls(a, 11 + shiftNumbers, null);
    {
        // First child (id 12 + shift) — always full.
        Comment aB = new Comment
        {
            IsFull = true,
            Poster = new UserLite { Username = "******" },
            Text = "2"
        };
        a.Replies.Comments.Add(aB);
        SetIdAndUrls(aB, 12 + shiftNumbers, a);
        {
            // Grandchild (id 13 + shift).
            Comment aBC = new Comment
            {
                IsFull = makeAllFull,
                Poster = new UserLite { Username = "******" },
                Text = makeAllFull ? "3" : string.Empty
            };
            aB.Replies.Comments.Add(aBC);
            SetIdAndUrls(aBC, 13 + shiftNumbers, aB);
        }
        // Second child (id 14 + shift).
        Comment aD = new Comment
        {
            IsFull = makeAllFull,
            Poster = new UserLite { Username = "******" },
            Text = makeAllFull ? "4" : string.Empty
        };
        a.Replies.Comments.Add(aD);
        SetIdAndUrls(aD, 14 + shiftNumbers, a);
    }
    p.Replies.Comments.Add(a);
    return (p);
}
void OnEnable()
{
    // Register this component as the active instance and show the main page
    // (the entry page starts hidden).
    instance = this;
    MainPage.SetActive(true);
    EntryPage.SetActive(false);
}
private void HandleNativeNavigationMessage(EntryPage sender, NativeNavigationArgs args)
{
    // Navigate natively to the photo-taking screen.
    StartActivity(typeof(TakePhotoActivity));
}
public void AddsComplexTreeAsTwoThreads([Values(true, false)]bool firstLeafIsAuthor, [Values(true, false)] bool firstReplyIsTheSamePerson)
{
    EntryPage ep = new EntryPage();
    ep.Entry.Poster.Username = "******";

    // Tree. Should become (B A C A), (X A).
    Comment a = new Comment();
    TestingShared.SetIdAndUrls(a, 1, null);
    a.Poster.Username = "******";
    ep.Replies.Comments.Add(a);

    Comment a_b = new Comment();
    TestingShared.SetIdAndUrls(a_b, 2, a);
    a_b.Poster.Username = "******";
    a.Replies.Comments.Add(a_b);

    // Depending on the parameter, this reply comes from the same person ("B")
    // or from a third party ("C").
    Comment a_b_c = new Comment();
    TestingShared.SetIdAndUrls(a_b_c, 3, a_b);
    a_b_c.Poster.Username = firstReplyIsTheSamePerson ? "B" : "C";
    a_b.Replies.Comments.Add(a_b_c);

    // The leaf is either the entry author ("A") or a random user ("R").
    Comment a_b_c_d = new Comment();
    TestingShared.SetIdAndUrls(a_b_c_d, 4, a_b_c);
    a_b_c_d.Poster.Username = firstLeafIsAuthor ? "A" : "R";
    a_b_c.Replies.Comments.Add(a_b_c_d);

    // Second branch under a_b.
    Comment a_b_e = new Comment();
    TestingShared.SetIdAndUrls(a_b_e, 5, a_b);
    a_b_e.Poster.Username = "******";
    a_b.Replies.Comments.Add(a_b_e);

    Comment a_b_e_f = new Comment();
    TestingShared.SetIdAndUrls(a_b_e_f, 6, a_b_e);
    a_b_e_f.Poster.Username = "******";
    a_b_e.Replies.Comments.Add(a_b_e_f);

    List<Comment[]> result = _picker.Pick(ep);

    if (firstLeafIsAuthor)
    {
        // The author leaf drags the whole first branch in.
        Assert.AreEqual(2, result.Count);
        CollectionAssert.AreEqual(new[] { a, a_b, a_b_c, a_b_c_d }, result[0]);
        CollectionAssert.AreEqual(new[] { a_b_e, a_b_e_f }, result[1]);
    }
    else
    {
        if (firstReplyIsTheSamePerson)
        {
            // We take a_b_c.
            Assert.AreEqual(2, result.Count);
            CollectionAssert.AreEqual(new[] { a, a_b, a_b_c }, result[0]);
            CollectionAssert.AreEqual(new[] { a_b_e, a_b_e_f }, result[1]);
        }
        else
        {
            // We don't take it. We don't care what he wrote.
            Assert.AreEqual(1, result.Count);
            CollectionAssert.AreEqual(new[] { a, a_b, a_b_e, a_b_e_f }, result[0]);
        }
    }
}