/// <summary>
/// When the feed has no new posts but two dirty (updated) posts are queued,
/// both dirty posts should be re-fetched and uploaded.
/// </summary>
public async Task FetchUpdatedPostsUnitTest()
{
    // Arrange: a two-day window with two dirty posts to refresh.
    ConnectorTask taskInfo = new ConnectorTask
    {
        StartTime = DateTime.Parse("2018-01-09"),
        EndTime = DateTime.Parse("2018-01-11"),
        JobId = "job1",
        TaskId = "task1",
        TenantId = "tenant1",
        DirtyEntities = new List<string>() { "a", "b" },
    };

    var mockDownloader = new Mock<IDownloader>();
    // Feed endpoint yields no new posts; only the dirty-post lookup returns data.
    mockDownloader.SetupSequence(x => x.GetWebContent<PostListFB, ErrorsFB>(It.IsAny<string>(), It.IsAny<AuthenticationHeaderValue>()))
        .ReturnsAsync(JsonConvert.DeserializeObject<PostListFB>("{\"data\": []}"));
    mockDownloader.SetupSequence(x => x.GetWebContent<Dictionary<string, PostFB>, ErrorsFB>(It.IsAny<string>(), It.IsAny<AuthenticationHeaderValue>()))
        .ReturnsAsync(JsonConvert.DeserializeObject<Dictionary<string, PostFB>>(File.ReadAllText(@"FakeData\FakeDirtyPosts.json")));

    FakeUploader uploader = new FakeUploader();
    JobProcessorFB job = new JobProcessorFB(mockDownloader.Object, uploader);
    string sourceInfo = "{\"PageId\":\"123\",\"AccessToken\":\"Fake\",\"PageName\":\"Fake123\"}";

    // Act
    await job.FetchData(taskInfo, sourceInfo);

    // Assert: both dirty posts were uploaded.
    // (Fixed argument order — MSTest expects Assert.AreEqual(expected, actual).)
    Assert.AreEqual(2, uploader.fakeStorage.Count);
}
/// <summary>
/// Verifies the three Facebook Graph API URL builders: the time-windowed feed URL,
/// the dirty-posts (updated posts) URL, and the video-source URL.
/// </summary>
public void QueryFBTests()
{
    // Arrange: a task window plus two dirty post ids, against page "123".
    ConnectorTask taskInfo = new ConnectorTask()
    {
        StartTime = DateTime.Parse("2018-01-01"),
        EndTime = DateTime.Parse("2018-09-17"),
        DirtyEntities = new List<string> { "123", "456" },
    };
    SourceInfoFB sourceInfo = new SourceInfoFB() { PageId = "123" };

    // Feed URL carries the full field projection plus the since/until window.
    string expectedFeedUrl = "https://graph.facebook.com:443/v3.0/123/feed?fields=id,created_time,from{name,id,picture},to,message,story,likes.summary(true),reactions.summary(true),comments{from{name,id,picture},id,created_time,message,parent{id},comment_count,like_count,attachment,message_tags,comments{from{name,id,picture},id,created_time,message,parent{id},comment_count,like_count,attachment,message_tags}},attachments,source,message_tags,type,status_type&since=2018-01-01T00:00:00&until=2018-09-17T00:00:00";
    Assert.AreEqual(expectedFeedUrl, QueryFB.GetFeedUrl(taskInfo, sourceInfo));

    // Updated-posts URL batches the dirty ids with the same field projection.
    string expectedUpdatePostsUrl = "https://graph.facebook.com:443/v3.0/?ids=123,456&fields=id,created_time,from{name,id,picture},to,message,story,likes.summary(true),reactions.summary(true),comments{from{name,id,picture},id,created_time,message,parent{id},comment_count,like_count,attachment,message_tags,comments{from{name,id,picture},id,created_time,message,parent{id},comment_count,like_count,attachment,message_tags}},attachments,source,message_tags,type,status_type";
    Assert.AreEqual(expectedUpdatePostsUrl, QueryFB.GetUpdatedPostsUrl(taskInfo));

    // Video URL only requests the source field for a single object id.
    string expectedVideoSourceUrl = "https://graph.facebook.com:443/v3.0/123?fields=source";
    Assert.AreEqual(expectedVideoSourceUrl, QueryFB.GetVideoUrl("123"));
}
/// <summary>
/// When SinceId already equals the newest fake tweet id, FetchData should return
/// nothing and upload nothing.
/// </summary>
public async Task FetchData_WhenSinceIdisMax_ThenNoDataFetched()
{
    var tweets = JsonConvert.DeserializeObject<List<Tweet>>(File.ReadAllText(@"FakeTweets.json"));
    // Newest tweet id in the fixture, used as the since-id watermark.
    var max = tweets.Select(t => long.Parse(t.Tweetid)).ToList<long>().Max().ToString();
    // NOTE(review): production getParams() emits "since_id" (snake_case), not "sinceId",
    // so this Is-filter probably never matches; the mock then returns null for every URL
    // and the test may pass for the wrong reason. Verify the expected URL against
    // JobProcessorTwitter.FetchData / getParams before relying on this test.
    downloader.Setup(x => x.GetWebContent<List<Tweet>, ErrorsTwitter>(It.Is<string>(s => s == $"https://api.twitter.com/1.1/statuses/user_timeline.json?include_entities=true&count=200&include_rts=true&sinceId={max}"), It.IsAny<AuthenticationHeaderValue>()))
        .ReturnsAsync(JsonConvert.DeserializeObject<List<Tweet>>("[]"));
    ConnectorTask connectorTask = new ConnectorTask { TenantId = "tenantId", JobId = "j1", TaskId = "t1", StartTime = DateTime.UtcNow.AddMonths(-2), EndTime = DateTime.UtcNow, DirtyEntities = null, BlobSasUri = "dummyUri" };
    SourceInfoTwitter sourceInfo = new SourceInfoTwitter() { SinceId = max, };
    jobProcessor = new JobProcessorTwitter(downloader.Object, uploader.Object, new TwitterSchemaToItemMapper());
    var listTweets = await jobProcessor.FetchData(connectorTask, JsonConvert.SerializeObject(sourceInfo));
    // No tweets returned, exactly one timeline call made, nothing uploaded.
    Assert.IsTrue(listTweets.Count == 0);
    downloader.Verify(m => m.GetWebContent<List<Tweet>, ErrorsTwitter>(It.IsAny<string>(), It.IsAny<AuthenticationHeaderValue>()), Times.Once);
    uploader.Verify(x => x.UploadItem(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<Item>()), Times.Never);
}
/// <summary>
/// When the downloader throws a Facebook client error, FetchData must surface it
/// with the parsed error details, and nothing may be uploaded.
/// </summary>
public async Task ErrorsFBUnitTest()
{
    // Arrange
    ConnectorTask taskInfo = new ConnectorTask
    {
        StartTime = DateTime.Parse("2018-01-09"),
        EndTime = DateTime.Parse("2018-01-11"),
        JobId = "job1",
        TaskId = "task1",
        TenantId = "tenant1",
    };
    var mockDownloader = new Mock<IDownloader>();
    mockDownloader.Setup(x => x.GetWebContent<PostListFB, ErrorsFB>(It.IsAny<string>(), It.IsAny<AuthenticationHeaderValue>()))
        .Throws(new ClientException<ErrorsFB>(JsonConvert.DeserializeObject<ErrorsFB>(File.ReadAllText(@"FakeData\FakeError.json"))));
    FakeUploader uploader = new FakeUploader();
    JobProcessorFB job = new JobProcessorFB(mockDownloader.Object, uploader);
    string sourceInfo = "{\"PageId\":\"123\",\"AccessToken\":\"Fake\",\"PageName\":\"Fake123\"}";

    // Act + Assert: previously, if no exception was thrown the catch-block asserts
    // never executed and the test passed silently. Track that the exception occurred.
    bool exceptionThrown = false;
    try
    {
        await job.FetchData(taskInfo, sourceInfo);
    }
    catch (ClientException<ErrorsFB> error)
    {
        exceptionThrown = true;
        // Expected value first, per MSTest convention.
        Assert.AreEqual("Message describing the error", error.error.Error.ErrorMessage);
        Assert.AreEqual("OAuthException", error.error.Error.ErrorType);
    }
    Assert.IsTrue(exceptionThrown, "Expected ClientException<ErrorsFB> was not thrown.");
    Assert.AreEqual(0, uploader.fakeStorage.Count);
}
/// <summary>
/// When SinceId is zero (no watermark), FetchData should return exactly the fake
/// tweets that fall inside the task's [StartTime, EndTime] window.
/// </summary>
public async Task FetchData_WhenSinceIdisZero_ThenDataFetched()
{
    // First timeline page returns the fixture tweets; any other URL returns empty.
    var fakeTweets = JsonConvert.DeserializeObject<List<Tweet>>(File.ReadAllText(@"FakeTweets.json"));
    string firstPageUrl = "https://api.twitter.com/1.1/statuses/user_timeline.json?include_entities=true&count=200&include_rts=true&tweet_mode=extended";
    downloader.Setup(x => x.GetWebContent<List<Tweet>, ErrorsTwitter>(It.Is<string>(s => s == firstPageUrl), It.IsAny<AuthenticationHeaderValue>()))
        .ReturnsAsync(fakeTweets);
    downloader.Setup(x => x.GetWebContent<List<Tweet>, ErrorsTwitter>(It.Is<string>(s => s != firstPageUrl), It.IsAny<AuthenticationHeaderValue>()))
        .ReturnsAsync(JsonConvert.DeserializeObject<List<Tweet>>("[]"));

    ConnectorTask connectorTask = new ConnectorTask
    {
        TenantId = "tenantId",
        JobId = "j1",
        TaskId = "t1",
        StartTime = new DateTime(2018, 12, 01),
        EndTime = new DateTime(2019, 05, 01),
        DirtyEntities = null,
        BlobSasUri = "dummyUri"
    };
    SourceInfoTwitter sourceInfo = new SourceInfoTwitter() { SinceId = "0", };

    jobProcessor = new JobProcessorTwitter(downloader.Object, uploader.Object, new TwitterSchemaToItemMapper());
    var listTweets = await jobProcessor.FetchData(connectorTask, JsonConvert.SerializeObject(sourceInfo));

    // Compute the expected set locally: only fixture tweets inside the window survive.
    Func<Tweet, DateTime> parseCreated = t =>
        DateTime.ParseExact(t.CreatedAt, "ddd MMM dd HH:mm:ss +ffff yyyy", new System.Globalization.CultureInfo("en-US"));
    fakeTweets.RemoveAll(t => DateTime.Compare(parseCreated(t), connectorTask.StartTime) < 0);
    fakeTweets.RemoveAll(t => DateTime.Compare(parseCreated(t), connectorTask.EndTime) > 0);

    Assert.AreEqual(listTweets.Count, fakeTweets.Count);
    mockRepo.VerifyAll();
}
/// <summary>
/// Looks up the serialized source info for this job in the page-job mapping table.
/// </summary>
/// <param name="taskInfo">task whose JobId keys the table row</param>
/// <returns>the stored SourceInfo JSON for the job</returns>
/// <exception cref="InvalidOperationException">no mapping row exists for the job</exception>
private async Task<string> GetSourceInfoFromTable(ConnectorTask taskInfo)
{
    Expression<Func<PageJobEntity, bool>> filter = entity => entity.RowKey == taskInfo.JobId;
    List<PageJobEntity> pageJobEntityList = await azureTableProvider.QueryEntitiesAsync<PageJobEntity>(PageJobMappingTable, filter);

    // Previously `pageJobEntityList?[0]` produced a null reference (null list) or an
    // ArgumentOutOfRangeException (empty list) with no context; fail explicitly instead.
    if (pageJobEntityList == null || pageJobEntityList.Count == 0)
    {
        throw new InvalidOperationException($"No page-job mapping found for JobId: {taskInfo.JobId}");
    }

    return pageJobEntityList[0].SourceInfo;
}
/// <summary>
/// Fetches data for the given time interval: new posts in the window first, then a
/// re-fetch of any updated ("dirty") posts.
/// </summary>
/// <param name="taskInfo">contains the time stamps for which data is to be fetched</param>
/// <param name="sourceInfo">serialized SourceInfoFB (page id, token, page name)</param>
/// <returns>metadata for every item uploaded during the run</returns>
public override async Task<List<ItemMetadata>> FetchData(ConnectorTask taskInfo, string sourceInfo)
{
    var fbSourceInfo = JsonConvert.DeserializeObject<SourceInfoFB>(sourceInfo);
    var itemMetadata = new List<ItemMetadata>();

    await FetchPosts(taskInfo, fbSourceInfo, itemMetadata);
    await FetchUpdates(taskInfo, fbSourceInfo, itemMetadata);

    return itemMetadata;
}
/// <summary>
/// Two tasks are equal when their _ips members are equal.
/// NOTE(review): this class overrides Equals; a matching GetHashCode override is not
/// visible in this chunk — confirm one exists elsewhere (CA2218), otherwise hash-based
/// collections will misbehave.
/// </summary>
/// <param name="o">object to compare against; non-ConnectorTask values compare unequal</param>
public override bool Equals(object o)
{
    // Pattern match replaces the `as` cast + null check; also fixes the
    // non-conventional `override public` modifier order.
    return o is ConnectorTask ct && ct._ips.Equals(_ips);
}
/// <summary>
/// Persists the updated Twitter source info (e.g. the new since-id watermark) back
/// into the page-job mapping table row for this job.
/// </summary>
/// <param name="taskInfo">task whose JobId keys the table row</param>
/// <param name="twitterSourceInfo">source info to serialize and store</param>
/// <exception cref="InvalidOperationException">no mapping row exists for the job</exception>
public async Task UpdateSourceInfo(ConnectorTask taskInfo, SourceInfoTwitter twitterSourceInfo)
{
    // Previously the table reference was fetched twice (once into the field, once into
    // a local); resolve it once and reuse it for both the query and the write.
    PageJobMappingTable = azureTableProviderInstance.GetAzureTableReference(Settings.PageJobMappingTableName);
    Expression<Func<PageJobEntity, bool>> filter = entity => entity.RowKey == taskInfo.JobId;
    List<PageJobEntity> pageJobEntityList = await azureTableProviderInstance.QueryEntitiesAsync<PageJobEntity>(PageJobMappingTable, filter);

    // Guard the previously-unchecked [0] index.
    if (pageJobEntityList == null || pageJobEntityList.Count == 0)
    {
        throw new InvalidOperationException($"No page-job mapping found for JobId: {taskInfo.JobId}");
    }

    PageJobEntity pageJobEntity = pageJobEntityList[0];
    pageJobEntity.SourceInfo = JsonConvert.SerializeObject(twitterSourceInfo);
    await azureTableProviderInstance.InsertOrReplaceEntityAsync(PageJobMappingTable, pageJobEntity);
}
/// <summary>
/// Builds the Graph API feed URL for a page, windowed by the task's start/end times.
/// </summary>
/// <param name="taskInfo">supplies the since/until window</param>
/// <param name="sourceInfo">supplies the page id</param>
/// <returns>the decoded feed URL</returns>
public static string GetFeedUrl(ConnectorTask taskInfo, SourceInfoFB sourceInfo)
{
    UriBuilder builder = new UriBuilder(SettingsFB.FacebookBaseUrl);
    builder.Path += $"/{sourceInfo.PageId}/feed";

    var queryParams = HttpUtility.ParseQueryString(builder.Query);
    queryParams["fields"] = SettingsFB.FacebookQueryFields;
    queryParams["since"] = taskInfo.StartTime.ToString("yyyy-MM-ddTHH:mm:ss");
    queryParams["until"] = taskInfo.EndTime.ToString("yyyy-MM-ddTHH:mm:ss");
    builder.Query = queryParams.ToString();

    // Decode so braces/commas in the fields projection stay human-readable,
    // matching the literal URLs asserted in QueryFBTests.
    return HttpUtility.UrlDecode(builder.ToString());
}
/// <summary>
/// Builds the Graph API batch URL that re-fetches all dirty (updated) posts by id.
/// </summary>
/// <param name="taskInfo">supplies the dirty post ids to batch into ?ids=</param>
/// <returns>the decoded batch URL</returns>
public static string GetUpdatedPostsUrl(ConnectorTask taskInfo)
{
    UriBuilder builder = new UriBuilder(SettingsFB.FacebookBaseUrl);
    builder.Path += "/";

    var queryParams = HttpUtility.ParseQueryString(builder.Query);
    queryParams["ids"] = string.Join(",", taskInfo.DirtyEntities);
    queryParams["fields"] = SettingsFB.FacebookQueryFields;
    builder.Query = queryParams.ToString();

    // Decoded form matches the literal URL asserted in QueryFBTests.
    return HttpUtility.UrlDecode(builder.ToString());
}
/// <summary>
/// Fetches tweets newer than the stored since-id, paging the timeline until the API
/// returns nothing new or the window is exhausted; uploads each tweet and returns
/// the uploaded items' metadata.
/// </summary>
/// <param name="taskInfo">supplies the start/end window and job/task ids for upload</param>
/// <param name="sourceInfo">serialized SourceInfoTwitter (OAuth tokens, SinceId)</param>
public override async Task<List<ItemMetadata>> FetchData(ConnectorTask taskInfo, string sourceInfo)
{
    Trace.TraceInformation("Data fetch Started");
    List<ItemMetadata> itemMetaData = new List<ItemMetadata>();
    SourceInfoTwitter twitterSourceInfo = JsonConvert.DeserializeObject<SourceInfoTwitter>(sourceInfo);
    OAuth1Token token = new OAuth1Token(SettingsTwitter.TwitterApiKey, SettingsTwitter.TwitterApiSecretKey, twitterSourceInfo.ClientToken, twitterSourceInfo.ClientSecret);
    // NOTE(review): filterTime is assigned but never read in this method — dead local?
    var filterTime = taskInfo.EndTime;
    OAuth1Helper oAuth1Helper = new OAuth1Helper(url, token, HttpMethod.Get.ToString().ToUpperInvariant());
    while (true)
    {
        // since_id advances each iteration, so each GetWebContent call fetches the
        // next (newer) page of the timeline.
        Dictionary<string, string> param = getParams(taskInfo, twitterSourceInfo);
        string queryString = oAuth1Helper.GetQueryString(param);
        string authHeader = oAuth1Helper.GenerateAuthorizationHeader();
        AuthenticationHeaderValue header = new AuthenticationHeaderValue("OAuth", authHeader);
        List<Tweet> tweets = await downloader.GetWebContent<List<Tweet>, ErrorsTwitter>(queryString, header);
        bool isScheduleCompleted = false;
        if (tweets != null && tweets.Any())
        {
            // NOTE(review): ToString() never returns null, so the `?? SinceId`
            // fallback is dead code — presumably a leftover; confirm and simplify.
            var minId = tweets.Select(t => long.Parse(t.Tweetid)).ToList<long>().Min().ToString() ?? twitterSourceInfo.SinceId;
            // If even the OLDEST tweet in this page is after EndTime, everything in
            // range has been fetched and the loop can stop.
            isScheduleCompleted = DateTime.Compare(DateTime.ParseExact(tweets.Where(t => t.Tweetid.Equals(minId)).First().CreatedAt, Const_TwitterDateTemplate, new System.Globalization.CultureInfo("en-US")), taskInfo.EndTime) > 0;
        }
        if (tweets == null || tweets.Count == 0 || isScheduleCompleted)
        {
            break; // When no new data to get since sinceID(last fetched tweet)
        }
        // Advance the watermark to the newest tweet of this page BEFORE windowing,
        // so out-of-window tweets are not re-fetched next iteration.
        twitterSourceInfo.SinceId = tweets.Select(t => long.Parse(t.Tweetid)).ToList<long>().Max().ToString();
        // Drop tweets outside [StartTime, EndTime].
        tweets.RemoveAll(t => DateTime.Compare(DateTime.ParseExact(t.CreatedAt, Const_TwitterDateTemplate, new System.Globalization.CultureInfo("en-US")), taskInfo.StartTime) < 0);
        tweets.RemoveAll(t => DateTime.Compare(DateTime.ParseExact(t.CreatedAt, Const_TwitterDateTemplate, new System.Globalization.CultureInfo("en-US")), taskInfo.EndTime) > 0);
        Trace.TraceInformation($"Tweets Fetched {tweets.Count}");
        if (tweets.Any())
        {
            foreach (var tweet in tweets)
            {
                var enrichedTweet = await EnrichTweetWithAttachments(tweet);
                itemMetaData.Add(await UploadTweet(twitterItemMapper, enrichedTweet, taskInfo));
            }
            // NOTE(review): SinceId was already set to this same Max() above — this
            // second assignment looks redundant; confirm before removing.
            twitterSourceInfo.SinceId = tweets.Select(t => long.Parse(t.Tweetid)).ToList<long>().Max().ToString();
        }
    }
    return itemMetaData;
}
/// <summary>
/// Builds the Twitter user_timeline query parameters, adding since_id only when a
/// positive watermark exists.
/// </summary>
/// <param name="taskInfo">currently unused; kept for signature compatibility with callers</param>
/// <param name="sourceInfo">carries the last-fetched tweet id (SinceId)</param>
/// <returns>parameters list</returns>
private Dictionary<string, string> getParams(ConnectorTask taskInfo, SourceInfoTwitter sourceInfo)
{
    Dictionary<string, string> parameters = new Dictionary<string, string>
    {
        { "include_entities", "true" },
        { "count", "200" },
        { "include_rts", "true" },
    };

    // long.TryParse avoids the FormatException Convert.ToInt64 threw on a
    // non-numeric SinceId; null/empty/invalid values simply omit since_id,
    // matching the previous behavior for null ("0"-like) input.
    if (long.TryParse(sourceInfo.SinceId, out long sinceId) && sinceId > 0)
    {
        parameters.Add("since_id", sourceInfo.SinceId);
    }

    return parameters;
}
/// <summary>
/// Fetches updated ("dirty") posts in one batched Graph API call and re-processes
/// each one through HandlePost.
/// </summary>
/// <param name="taskInfo">info related to task, eg accessToken</param>
/// <param name="sourceInfo">contains data source information</param>
/// <param name="itemMetadata">accumulator for uploaded item metadata</param>
public async Task FetchUpdates(ConnectorTask taskInfo, SourceInfoFB sourceInfo, List<ItemMetadata> itemMetadata)
{
    // Guard clause: nothing to refresh.
    if (taskInfo.DirtyEntities == null || taskInfo.DirtyEntities.Count == 0)
    {
        return;
    }

    Trace.TraceInformation($"Number of dirty posts: {taskInfo.DirtyEntities.Count}");
    string url = QueryFB.GetUpdatedPostsUrl(taskInfo);
    AuthenticationHeaderValue header = new AuthenticationHeaderValue("Bearer", sourceInfo.AccessToken);
    Dictionary<string, PostFB> dict = await this.downloader.GetWebContent<Dictionary<string, PostFB>, ErrorsFB>(url, header);

    // Previously a null payload from the downloader crashed the foreach with an NRE.
    if (dict == null)
    {
        return;
    }

    foreach (KeyValuePair<string, PostFB> postEntry in dict)
    {
        await HandlePost(postEntry.Value, header, sourceInfo.PageId, sourceInfo.PageName, taskInfo, itemMetadata);
    }
}
/// <summary>
/// Runs the full Facebook pipeline against fixture data and verifies that the three
/// item kinds with golden files (post, comment, reply) round-trip correctly.
/// </summary>
public async Task DownloadDataAndTransformUnitTest()
{
    // Arrange
    ConnectorTask taskInfo = new ConnectorTask
    {
        StartTime = DateTime.Parse("2018-01-09"),
        EndTime = DateTime.Parse("2018-01-11"),
        JobId = "job1",
        TaskId = "task1",
        TenantId = "tenant1",
    };
    var mockDownloader = new Mock<IDownloader>();
    // One page of fixture posts, then an empty page to terminate paging.
    mockDownloader.SetupSequence(x => x.GetWebContent<PostListFB, ErrorsFB>(It.IsAny<string>(), It.IsAny<AuthenticationHeaderValue>()))
        .ReturnsAsync(JsonConvert.DeserializeObject<PostListFB>(File.ReadAllText(@"FakeData\FakeData.json")))
        .ReturnsAsync(JsonConvert.DeserializeObject<PostListFB>("{\"data\": []}"));
    mockDownloader.Setup(x => x.DownloadFileAsBase64EncodedString(It.IsAny<string>()))
        .ReturnsAsync(Convert.ToBase64String(File.ReadAllBytes(@"FakeData\FakeImage.jpg")));
    FakeUploader uploader = new FakeUploader();
    JobProcessorFB job = new JobProcessorFB(mockDownloader.Object, uploader);
    string sourceInfo = "{\"PageId\":\"123\",\"AccessToken\":\"Fake\",\"PageName\":\"Fake123\"}";

    // Act
    await job.FetchData(taskInfo, sourceInfo);

    // Assert 1 item of each type - post, comment, reply
    int assertItemsCount = 0;
    foreach (var entry in uploader.fakeStorage)
    {
        if (File.Exists($@"FakeData\{entry.Key}.json"))
        {
            assertItemsCount++;
            string expectedJson = File.ReadAllText($@"FakeData\{entry.Key}.json");
            Item expectedItem = JsonConvert.DeserializeObject<Item>(expectedJson);
            AssertItemsAreEqual(expectedItem, entry.Value);
        }
    }
    // Fixed argument order — MSTest expects Assert.AreEqual(expected, actual).
    Assert.AreEqual(3, assertItemsCount);
}
/// <summary>
/// Executes a Twitter connector task end-to-end: fetches tweets, persists the new
/// since-id watermark, and reports completion (with item metadata) to the Event API.
/// </summary>
/// <param name="jobMessage">serialized ConnectorTask describing the work</param>
public async Task Execute(string jobMessage)
{
    ConnectorTask taskInfo = JsonConvert.DeserializeObject<ConnectorTask>(jobMessage);
    IEventApiClient eventApiClient = new EventApiClient(new Auth(Settings.AAdAppId, Settings.AAdAppSecret), Settings.EventAPIBaseUrl);
    IUploader uploader = new BlobUploader(taskInfo.BlobSasUri);
    string sourceInfo = await GetSourceInfoFromTable(taskInfo);
    Trace.TraceInformation($"Fetched job info from PageJobEntity Table for JobId: {taskInfo.JobId} and TaskId: {taskInfo.TaskId}");

    Status status;
    List<ItemMetadata> itemMetadata = new List<ItemMetadata>();
    IDownloader downloader = new Downloader();
    TwitterSchemaToItemMapper itemMapper = new TwitterSchemaToItemMapper();
    JobProcessorTwitter jobProcessor = new JobProcessorTwitter(downloader, uploader, itemMapper);

    try
    {
        itemMetadata = await jobProcessor.FetchData(taskInfo, sourceInfo);

        // Advance the since-id watermark to the newest fetched tweet id (if any).
        SourceInfoTwitter twitterSourceInfo = JsonConvert.DeserializeObject<SourceInfoTwitter>(sourceInfo);
        var listId = itemMetadata.Select(t => long.Parse(t.id)).ToList();
        twitterSourceInfo.SinceId = listId.Count == 0 ? twitterSourceInfo.SinceId : listId.Max().ToString();
        await jobProcessor.UpdateSourceInfo(taskInfo, twitterSourceInfo);

        status = Status.Success;
        Trace.TraceInformation($"Successfully completed Job Execution, JobId:{taskInfo.JobId}, TaskId:{taskInfo.TaskId}");
    }
    catch (HttpRequestException e)
    {
        // Connectivity problems are retryable.
        status = Status.TemporaryFailure;
        Trace.TraceError($"Connectivity Error, JobId:{taskInfo.JobId}, TaskId:{taskInfo.TaskId}, Error: {e.Message}, ErrorStackTrace: {e.StackTrace}");
    }
    catch (Exception e)
    {
        status = Status.PermanentFailure;
        Trace.TraceError($"Unknown Failure, Requires Attention, JobId:{taskInfo.JobId}, TaskId:{taskInfo.TaskId}, Error: {e.Message}, ErrorStackTrace: {e.StackTrace}");
    }

    // BUG FIX: the old code called itemMetadata.OrderBy(...) and discarded the result
    // (LINQ does not sort in place), then Reverse()d the still-unsorted list. Sort
    // descending by id explicitly before reporting completion.
    itemMetadata = itemMetadata.OrderByDescending(i => i.id).ToList();
    await eventApiClient.OnDownloadCompleteAsync(taskInfo.TenantId, taskInfo.JobId, taskInfo.TaskId, status, itemMetadata);
}
/// <summary>
/// When the downloader fails with an HttpRequestException, FetchData must propagate
/// it to the caller (the job runner maps it to a temporary failure).
/// </summary>
public async Task FetchData_WhenErrorReturned_ThenExceptionThrown()
{
    // Arrange: watermark at the newest fixture tweet; every download attempt throws.
    var tweets = JsonConvert.DeserializeObject<List<Tweet>>(File.ReadAllText(@"FakeTweets.json"));
    var max = tweets.Select(t => long.Parse(t.Tweetid)).ToList<long>().Max().ToString();
    downloader.Setup(x => x.GetWebContent<List<Tweet>, ErrorsTwitter>(It.IsAny<string>(), It.IsAny<AuthenticationHeaderValue>()))
        .Throws(new HttpRequestException());
    ConnectorTask connectorTask = new ConnectorTask
    {
        TenantId = "tenantId",
        JobId = "j1",
        TaskId = "t1",
        StartTime = DateTime.UtcNow.AddMonths(-2),
        EndTime = DateTime.UtcNow,
        DirtyEntities = null,
        BlobSasUri = "dummyUri"
    };
    SourceInfoTwitter sourceInfo = new SourceInfoTwitter() { SinceId = max, };
    jobProcessor = new JobProcessorTwitter(downloader.Object, uploader.Object, new TwitterSchemaToItemMapper());

    // Act + Assert: the old test never asserted the exception the name promises —
    // the thrown HttpRequestException simply escaped the test method. Assert it
    // explicitly. (Also removed the unused ErrorsTwitter local that was never thrown.)
    await Assert.ThrowsExceptionAsync<HttpRequestException>(
        () => jobProcessor.FetchData(connectorTask, JsonConvert.SerializeObject(sourceInfo)));
}
/// <summary>
/// When the feed returns no posts, nothing should be uploaded.
/// </summary>
public async Task NoPostsFetchedUnitTest()
{
    // Arrange
    ConnectorTask taskInfo = new ConnectorTask
    {
        StartTime = DateTime.Parse("2018-01-09"),
        EndTime = DateTime.Parse("2018-01-11"),
        JobId = "job1",
        TaskId = "task1",
        TenantId = "tenant1",
    };
    var mockDownloader = new Mock<IDownloader>();
    mockDownloader.Setup(x => x.GetWebContent<PostListFB, ErrorsFB>(It.IsAny<string>(), It.IsAny<AuthenticationHeaderValue>()))
        .ReturnsAsync(JsonConvert.DeserializeObject<PostListFB>("{\"data\": []}"));
    FakeUploader uploader = new FakeUploader();
    JobProcessorFB job = new JobProcessorFB(mockDownloader.Object, uploader);
    string sourceInfo = "{\"PageId\":\"123\",\"AccessToken\":\"Fake\",\"PageName\":\"Fake123\"}";

    // Act
    await job.FetchData(taskInfo, sourceInfo);

    // Assert: fixed argument order — MSTest expects Assert.AreEqual(expected, actual).
    Assert.AreEqual(0, uploader.fakeStorage.Count);
}
/// <summary>
/// Fetches posts for the given time interval, following the feed's paging cursor
/// until no more pages remain, and delegates each post to HandlePost.
/// </summary>
/// <param name="taskInfo">contains the time stamps for which data is to be fetched</param>
/// <param name="sourceInfo">contains data source information</param>
/// <param name="itemMetadata">accumulator for uploaded item metadata</param>
public async Task FetchPosts(ConnectorTask taskInfo, SourceInfoFB sourceInfo, List<ItemMetadata> itemMetadata)
{
    string url = QueryFB.GetFeedUrl(taskInfo, sourceInfo);
    Trace.TraceInformation($"Fetching Data from Facebook, TenantId: {taskInfo.TenantId}, JobId: {taskInfo.JobId}, StartTime: {taskInfo.StartTime.ToString()}, EndTime: {taskInfo.EndTime.ToString()}");
    AuthenticationHeaderValue header = new AuthenticationHeaderValue("Bearer", sourceInfo.AccessToken);

    do
    {
        PostListFB list = await this.downloader.GetWebContent<PostListFB, ErrorsFB>(url, header);

        // Guard against a null page (previously an NRE) and stop on an empty page.
        // Length replaces the needless Enumerable.Count() call on an array.
        PostFB[] postList = list?.Data;
        if (postList == null || postList.Length == 0)
        {
            break;
        }

        foreach (PostFB post in postList)
        {
            await HandlePost(post, header, sourceInfo.PageId, sourceInfo.PageName, taskInfo, itemMetadata);
        }

        // Follow the next-page cursor; null terminates the loop.
        url = list.Paging?.Next;
    } while (url != null);
}
/// <summary>
/// Creates a method node and wires up its two child connectors.
/// </summary>
/// <param name="label">display label forwarded to the base node; the `false`
/// flag's meaning is defined by the base constructor — confirm there</param>
public Method(string label) : base(label, false)
{
    // Both connectors require at least 1 entry and allow unbounded children
    // ("{0}" is the per-child label template).
    _preconditions = new ConnectorPrecondition(_children, "Precondition {0}", "Preconditions", 1, int.MaxValue);
    _genericChildren = new ConnectorTask(_children, "Task {0}", "Tasks", 1, int.MaxValue);
}
/// <summary>
/// Uploads a post as an Item, then walks the post's comment pages, delegating each
/// comment (and transitively its replies) to HandleComment.
/// </summary>
/// <param name="post">the Facebook post to process</param>
/// <param name="header">bearer-token auth header reused for paging requests</param>
/// <param name="pageId">owning page id</param>
/// <param name="pageName">owning page display name</param>
/// <param name="taskInfo">job/task identifiers used for the upload path</param>
/// <param name="itemMetadata">accumulator for uploaded item metadata</param>
private async Task HandlePost(PostFB post, AuthenticationHeaderValue header, string pageId, string pageName, ConnectorTask taskInfo, List<ItemMetadata> itemMetadata)
{
    Item postItem = await CreatePostItem(post, pageId, pageName, taskInfo, itemMetadata);
    // The first comment page arrives embedded in the post; subsequent pages are
    // fetched via the paging cursor (tracked by the moreComments flag below).
    CommentFB comments = post.Comments;
    bool moreComments = false;
    do
    {
        if (moreComments)
        {
            // only if there are more comments to this post, to be fetched
            comments = await this.downloader.GetWebContent<CommentFB, ErrorsFB>(comments.Paging.Next, header);
        }
        if (comments != null && comments.Data.Count() != 0)
        {
            List<CommentDataFB> Data = comments.Data.ToList();
            foreach (CommentDataFB comment in Data)
            {
                await HandleComment(comment, header, pageId, postItem, taskInfo, itemMetadata);
            }
        }
        // From the second iteration onward, pages must be fetched explicitly.
        moreComments = true;
    } while (comments?.Paging?.Next != null);
}
/// <summary>
/// Maps a tweet to the Item schema, uploads it, and returns its metadata.
/// </summary>
/// <param name="twitterItemMapper">Transforms Twitter data to Item Schema</param>
/// <param name="tweet">Twitter tweet</param>
/// <param name="taskInfo">job/task identifiers used for the upload path</param>
/// <returns>metadata (id, sent time, blob name) for the uploaded item</returns>
private async Task<ItemMetadata> UploadTweet(TwitterSchemaToItemMapper twitterItemMapper, Tweet tweet, ConnectorTask taskInfo)
{
    var mappedItem = twitterItemMapper.MapTweetToItem(tweet);
    var blobName = await uploader.UploadItem(taskInfo.JobId, taskInfo.TaskId, mappedItem);
    Trace.TraceInformation("Tweet Uploaded to Azure Blobs");
    return new ItemMetadata(mappedItem.Id, mappedItem.SentTimeUtc, blobName);
}
/// <summary>
/// Uploads a comment as an Item and, when the comment has replies, pages through
/// them and delegates each reply to HandleReply.
/// </summary>
/// <param name="comment">the comment to process (its Comments field is mutated as pages are fetched)</param>
/// <param name="header">bearer-token auth header reused for paging requests</param>
/// <param name="pageId">owning page id</param>
/// <param name="postItem">the already-created parent post item (context for replies)</param>
/// <param name="taskInfo">job/task identifiers used for the upload path</param>
/// <param name="itemMetadata">accumulator for uploaded item metadata</param>
private async Task HandleComment(CommentDataFB comment, AuthenticationHeaderValue header, string pageId, Item postItem, ConnectorTask taskInfo, List<ItemMetadata> itemMetadata)
{
    Item commentItem = await CreateCommentItem(comment, pageId, postItem, taskInfo, itemMetadata);
    // PreContext is cleared on the in-memory item after upload — presumably so this
    // item, when embedded in replies' PreContext, doesn't drag the post along twice.
    // TODO(review): confirm against CreateCommentItem/CreateReplyItem.
    commentItem.PreContext = null;
    if (comment.CommentCount > 0)
    {
        // First reply page is embedded in the comment; later pages are fetched.
        bool moreReplies = false;
        do
        {
            if (moreReplies)
            {
                // only if there are more replies to this comment, to be fetched
                comment.Comments = await this.downloader.GetWebContent<CommentFB, ErrorsFB>(comment.Comments.Paging.Next, header);
            }
            if (comment.Comments?.Data != null)
            {
                foreach (CommentDataFB reply in comment.Comments.Data)
                {
                    await HandleReply(reply, pageId, commentItem, postItem, taskInfo, itemMetadata);
                }
                // NOTE(review): moreReplies is only set when Data is non-null; if a
                // page ever had null Data but a non-null Paging.Next, this loop would
                // repeat without advancing — verify against the Graph API contract.
                moreReplies = true;
            }
        } while (comment.Comments?.Paging?.Next != null);
    }
}
/// <summary>
/// Builds a Reply Item from a comment-reply, downloads its attachment (if any),
/// uploads the item, and records its metadata.
/// </summary>
/// <param name="reply">the reply to convert</param>
/// <param name="pageId">owning page id (used as the item's container id)</param>
/// <param name="commentItem">parent comment item (reply's ParentId and pre-context)</param>
/// <param name="postItem">root post item (reply's ThreadId and pre-context)</param>
/// <param name="taskInfo">job/task identifiers used for the upload path</param>
/// <param name="itemMetadata">accumulator for uploaded item metadata</param>
private async Task CreateReplyItem(CommentDataFB reply, string pageId, Item commentItem, Item postItem, ConnectorTask taskInfo, List<ItemMetadata> itemMetadata)
{
    Item replyItem = new Item()
    {
        SchemaVersion = new Version(1, 0),
        Id = reply.Id,
        ContainerId = pageId,
        ContainerName = postItem.ContainerName,
        SourceType = "Facebook",
        ItemType = "Reply",
        ContentType = ContentType.Text,
        Content = reply.Message,
        ParentId = commentItem.Id,
        ThreadId = postItem.Id,
        SentTimeUtc = DateTime.Parse(reply.CreatedTime),
        Sender = ToItemUser(reply.From),
        NumOfLikes = reply.LikeCount,
        MessagePreviewText = postItem.Content,
        Recipients = Array.Empty<User>(),
        // A reply carries its ancestry (post, then parent comment) as pre-context.
        PreContext = new List<Item>() { postItem, commentItem },
    };
    if (reply.Attachment != null)
    {
        replyItem.ContentAttachments = new List<ContentAttachment>();
        string attachmentType = reply.Attachment.Type;
        // NOTE(review): only the image rendition is downloaded even for video
        // attachments — confirm this is intentional (video source is not fetched).
        string downloadedContent = await this.downloader.DownloadFileAsBase64EncodedString(reply.Attachment.Media?.Image?.Src);
        ContentAttachment attachment = new ContentAttachment()
        {
            // Shared links get the fixed preview name; otherwise derive the file name
            // from the video source or the image URL.
            AttachmentFileName = attachmentType.Contains("share") ? "safe_image.jpg" : FetchNameFromUri(attachmentType.Contains("video") ? reply.Attachment.Media?.Source : reply.Attachment.Media?.Image?.Src),
            AttachmentType = attachmentType,
            Content = downloadedContent,
            Uri = new Uri(attachmentType.Contains("video") ? reply.Attachment.Url : reply.Attachment.Media?.Image?.Src),
        };
        replyItem.ContentAttachments.Add(attachment);
    }
    string fileName = await uploader.UploadItem(taskInfo.JobId, taskInfo.TaskId, replyItem);
    itemMetadata.Add(new ItemMetadata(replyItem.Id, replyItem.SentTimeUtc, fileName));
}
/// <summary>
/// Processes one reply; a thin pass-through to CreateReplyItem which builds,
/// uploads, and records the reply item.
/// </summary>
private async Task HandleReply(CommentDataFB reply, string pageId, Item commentItem, Item postItem, ConnectorTask taskInfo, List<ItemMetadata> itemMetadata) =>
    await CreateReplyItem(reply, pageId, commentItem, postItem, taskInfo, itemMetadata);
/// <summary>
/// Builds a Post Item from a Facebook post, downloads its attachments (images or a
/// single video preview), uploads the item, records its metadata, and returns the
/// item so callers can use it as parent context for comments/replies.
/// </summary>
/// <param name="post">the post to convert</param>
/// <param name="pageId">owning page id (used as the item's container id)</param>
/// <param name="pageName">owning page display name</param>
/// <param name="taskInfo">job/task identifiers used for the upload path</param>
/// <param name="itemMetadata">accumulator for uploaded item metadata</param>
/// <returns>the uploaded post item</returns>
private async Task<Item> CreatePostItem(PostFB post, string pageId, string pageName, ConnectorTask taskInfo, List<ItemMetadata> itemMetadata)
{
    Item postItem = new Item()
    {
        SchemaVersion = new Version(1, 0),
        Id = post.Id,
        ContainerId = pageId,
        ContainerName = pageName,
        SourceType = "Facebook",
        ItemType = "Post",
        ContentType = ContentType.Text,
        Content = post.Message,
        // A post is the thread root: no parent, thread id is its own id.
        ParentId = string.Empty,
        ThreadId = post.Id,
        SentTimeUtc = DateTime.Parse(post.CreatedTime),
        Sender = ToItemUser(post.From),
        NumOfLikes = post.Likes?.Summary?.TotalCount ?? 0,
        MessagePreviewText = post.Message,
        Recipients = Array.Empty<User>(),
    };
    if (post.Attachments != null)
    {
        postItem.ContentAttachments = new List<ContentAttachment>();
        if (post.Attachments.Data?[0]?.Media == null)
        {
            // No top-level media: the post carries multiple sub-attachments (images).
            // NOTE(review): attachmentData can be null here (no Subattachments), which
            // would NRE at the foreach — confirm the API guarantees one or the other.
            AttachmentDataFB[] attachmentData = post.Attachments.Data?[0]?.Subattachments?.Data;
            foreach (AttachmentDataFB attachmentItem in attachmentData)
            {
                string downloadedContent = await this.downloader.DownloadFileAsBase64EncodedString(attachmentItem.Media?.Image?.Src);
                ContentAttachment attachment = new ContentAttachment()
                {
                    AttachmentFileName = FetchNameFromUri(attachmentItem.Media?.Image?.Src),
                    AttachmentType = attachmentItem.Type,
                    Content = downloadedContent,
                    Uri = new Uri(attachmentItem.Media?.Image?.Src),
                };
                postItem.ContentAttachments.Add(attachment);
            }
        }
        else
        {
            // only one video allowed per post, checking attachment type
            string attachmentType = post.Attachments.Data[0].Type;
            // NOTE(review): even for videos only the preview image is downloaded —
            // confirm this is intentional.
            string downloadedContent = await this.downloader.DownloadFileAsBase64EncodedString(post.Attachments.Data[0].Media?.Image?.Src);
            ContentAttachment attachment = new ContentAttachment()
            {
                // Shared links get the fixed preview name; otherwise derive the file
                // name from the video source or the image URL.
                AttachmentFileName = attachmentType.Contains("share") ? "safe_image.jpg" : FetchNameFromUri(attachmentType.Contains("video") ? post.Attachments.Data[0].Media?.Source : post.Attachments.Data[0].Media?.Image?.Src),
                AttachmentType = attachmentType,
                Content = downloadedContent,
                Uri = new Uri(attachmentType.Contains("video") ? post.Attachments.Data[0].Url : post.Attachments.Data[0].Media?.Image?.Src),
            };
            postItem.ContentAttachments.Add(attachment);
        }
    }
    string fileName = await uploader.UploadItem(taskInfo.JobId, taskInfo.TaskId, postItem);
    itemMetadata.Add(new ItemMetadata(postItem.Id, postItem.SentTimeUtc, fileName));
    return postItem;
}
/// <summary>
/// Maps a tweet to one or more Item schema objects, uploads each, and returns the
/// metadata for every uploaded item.
/// </summary>
/// <param name="twitterItemMapper">Transforms Twitter data to Item Schema</param>
/// <param name="tweet">Twitter tweet</param>
/// <param name="taskInfo">job/task identifiers used for the upload path</param>
/// <returns>metadata for each uploaded item, in mapping order</returns>
private async Task<List<ItemMetadata>> UploadTweet(TwitterSchemaToItemMapper twitterItemMapper, Tweet tweet, ConnectorTask taskInfo)
{
    List<Item> mappedItems = await twitterItemMapper.MapTweetToItemList(tweet);
    var metadata = new List<ItemMetadata>();
    foreach (Item item in mappedItems)
    {
        string blobName = await uploader.UploadItem(taskInfo.JobId, taskInfo.TaskId, item);
        Trace.TraceInformation("Tweet Uploaded to Azure Blobs");
        metadata.Add(new ItemMetadata(item.Id, item.SentTimeUtc, blobName));
    }
    return metadata;
}
/// <summary>
/// Creates a connector that will send the given ConnectToMessage via the sender and
/// collect responses. Only wires fields; no I/O happens in the constructor.
/// </summary>
/// <param name="local">the local node this connector acts for</param>
/// <param name="ps">sender used to deliver the CTM (also seeds the task key)</param>
/// <param name="ctm">the connect-to message to send</param>
/// <param name="co">the overlord that owns/initiated this connection attempt</param>
/// <param name="state">opaque caller state exposed via the State property</param>
public Connector(Node local, ISender ps, ConnectToMessage ctm, ConnectionOverlord co, object state)
{
    // Lock object guarding mutable state — presumably used by methods elsewhere.
    _sync = new Object();
    _local_node = local;
    // 0 = not finished; flag is an int rather than bool — likely for interlocked
    // updates elsewhere; TODO confirm.
    _is_finished = 0;
    // CTM responses collected so far.
    _got_ctms = new ArrayList();
    _sender = ps;
    _ctm = ctm;
    _co = co;
    // Task identity is derived from the sender.
    _task = new ConnectorTask(ps);
    // Abort callback may be supplied at most once, later.
    _abort = new WriteOnce<AbortCheck>();
    State = state;
}