/// <summary>
/// Downloads and parses an RSS feed, mapping the channel and its items onto a
/// <see cref="FeedEntity"/> owned by the given user.
/// NOTE(review): method name has a typo ("Asyc"); kept so existing callers don't break.
/// </summary>
/// <param name="url">Absolute URL of the RSS feed to fetch.</param>
/// <param name="userId">Owner id stored on the new feed entity.</param>
/// <returns>The populated feed, including its items, image, title and description.</returns>
public async Task<FeedEntity> GetFeedAsyc(string url, string userId)
{
    var feed = new FeedEntity(userId)
    {
        Items = new List<FeedItemEntity>(),
        Url = url
    };

    using (var httpClient = _httpClientFactory.CreateClient())
    // FIX: the response stream was never disposed — XmlReaderSettings.CloseInput
    // defaults to false, so the XmlReader does not close it, leaking the
    // underlying connection stream.
    using (var stream = await httpClient.GetStreamAsync(url))
    using (var xmlReader = XmlReader.Create(stream, new XmlReaderSettings() { Async = true }))
    {
        var feedReader = new RssFeedReader(xmlReader);
        while (await feedReader.Read())
        {
            switch (feedReader.ElementType)
            {
                // Element types we deliberately do not persist.
                case SyndicationElementType.Category:
                case SyndicationElementType.Person:
                case SyndicationElementType.Link:
                    break;

                case SyndicationElementType.Item:
                    var item = await feedReader.ReadItem();
                    var feedItem = new FeedItemEntity(feed.Id)
                    {
                        Title = item.Title,
                        Content = item.Description,
                        // An item may carry no link at all.
                        Url = item.Links.FirstOrDefault()?.Uri.ToString()
                    };
                    feed.Items.Add(feedItem);
                    break;

                case SyndicationElementType.Image:
                    var image = await feedReader.ReadImage();
                    feed.Image = image.Url.ToString();
                    break;

                default:
                    // Channel-level content: pick up the feed's own title/description.
                    var content = await feedReader.ReadContent();
                    if (content.Name.Equals("title", StringComparison.OrdinalIgnoreCase))
                    {
                        feed.Title = content.Value;
                    }
                    if (content.Name.Equals("description", StringComparison.OrdinalIgnoreCase))
                    {
                        feed.Description = content.Value;
                    }
                    break;
            }
        }
    }

    return feed;
}
/// <summary>
/// Scrapes the craigslist sites index page and stores one FeedEntity per
/// regional-site link found there.
/// </summary>
public ActionResult ScanFeeds()
{
    HtmlWeb web = new HtmlWeb();
    HtmlDocument document = web.Load("http://www.craigslist.org/about/sites");

    foreach (HtmlNode anchor in document.DocumentNode.SelectNodes("//a[@href]"))
    {
        var markup = anchor.OuterHtml;
        // Only regional craigslist links; skip the "about" navigation links.
        if (markup.Contains("about") || !markup.Contains("craigslist.org"))
        {
            continue;
        }

        try
        {
            var newFeed = new FeedEntity
            {
                Guid = Guid.NewGuid(),
                PersonGuid = Guid.Parse("fa406ba9-b63d-4492-bb02-ba6bc5c0fbd8"),
                Name = anchor.InnerText,
                FeedUrl = "http:" + anchor.GetAttributeValue("href", string.Empty)
            };
            newFeed.Save();
        }
        catch (ApplicationException)
        {
            // Most likely cause: this feed already exists — skip and continue.
        }
    }

    return View();
}
/// <summary>Creates a new, empty FeedEntity object.</summary>
/// <returns>A new, empty FeedEntity object.</returns>
// LLBLGen Pro generated factory method. Custom initialization belongs inside
// the user-code region below so it survives code regeneration.
public override IEntity Create()
{
    IEntity toReturn = new FeedEntity();

    // __LLBLGENPRO_USER_CODE_REGION_START CreateNewFeed
    // __LLBLGENPRO_USER_CODE_REGION_END

    return(toReturn);
}
/// <summary>
/// Indexes the given feed document via the client and fails loudly when the
/// index operation does not report a created document.
/// </summary>
/// <param name="feed">Feed document to index.</param>
/// <exception cref="DataException">Thrown when the document was not created.</exception>
public void InsertFeed(FeedEntity feed)
{
    var response = _client.Index(feed);
    if (response.Created)
    {
        return;
    }
    throw new DataException("Can't Insert Data");
}
/// <summary>
/// Builds a GTFS-realtime TripUpdate feed from the active trip updates for the
/// current Philippine travel date and uploads the serialized protobuf to the
/// "gtfsrt" Azure file share as tripupdate.pb.
/// </summary>
static void Main(string[] args)
{
    // Philippine local time is UTC+8; the stored procedure takes a yyyyMMdd travel date.
    DateTime phDate = DateTime.UtcNow.AddHours(8);
    List <RTTripUpdates> tripUpdates = db.Database
        .SqlQuery <RTTripUpdates>("RTTripUpdatesGetActive @travel_date"
        , new SqlParameter("@travel_date", phDate.ToString("yyyyMMdd")))
        .ToList();
    FeedMessage feed = new FeedMessage();
    if (tripUpdates.Count > 0)
    {
        foreach (RTTripUpdates tripUpdate in tripUpdates)
        {
            // Identify which trip this update applies to.
            TripDescriptor td = new TripDescriptor();
            td.TripId = tripUpdate.trip_id;
            td.RouteId = tripUpdate.route_id;
            td.DirectionId = (uint)tripUpdate.direction_id;
            td.StartDate = tripUpdate.start_date;
            td.StartTime = tripUpdate.start_time;
            // The update itself only carries the overall delay.
            TripUpdate tp = new TripUpdate();
            tp.Delay = tripUpdate.delay;
            tp.Trip = td;
            FeedEntity entity = new FeedEntity();
            entity.TripUpdate = tp;
            entity.Id = tripUpdate.id.ToString();
            feed.Entities.Add(entity);
        }
        byte[] objSerialized = Functions.ProtoSerialize(feed);
        CloudStorageAccount storageAccount = CloudStorageAccount.Parse(CloudConfigurationManager.GetSetting("StorageConnectionString"));
        string filename = "tripupdate.pb";
        // Create a CloudFileClient object for credentialed access to Azure Files.
        CloudFileClient fileClient = storageAccount.CreateCloudFileClient();
        // Get a reference to the file share we created previously.
        CloudFileShare share = fileClient.GetShareReference("gtfsrt");
        share.CreateIfNotExists();
        var rootDir = share.GetRootDirectoryReference();
        using (var stream = new MemoryStream(objSerialized, writable: false))
        {
            // Overwrites the previous tripupdate.pb in the share root.
            rootDir.GetFileReference(filename).UploadFromStream(stream);//.UploadFromByteArray(feed,);
        }
    }
    // Functions.CreatePBFile(storageAccount, filename, objSerialized);
}
/// <summary>
/// Maps a syndication item onto a new <see cref="FeedEntity"/>: copies the title
/// and delegates content extraction to <c>ExtractFeedContent</c>.
/// </summary>
public static FeedEntity ToFeedEntity(this SyndicationItem item)
{
    var entity = new FeedEntity { Title = item.Title.Text };
    ExtractFeedContent(item, entity);
    return entity;
}
/// <summary>
/// Persists a feed row to the feed table and all of its items to the feed-items table.
/// </summary>
/// <param name="feed">Feed to store; its <c>Items</c> are written as a batch.</param>
public async Task CreateFeed(FeedEntity feed)
{
    var insertFeed = TableOperation.Insert(feed);
    await _feedTable.ExecuteAsync(insertFeed);

    // FIX: Azure table batches are capped at 100 operations, and executing an
    // empty batch throws — chunk the items and skip the call when there are none.
    const int MaxBatchSize = 100;
    var batchOperation = new TableBatchOperation();
    foreach (var item in feed.Items)
    {
        batchOperation.Insert(item);
        if (batchOperation.Count == MaxBatchSize)
        {
            await _feedItemsTable.ExecuteBatchAsync(batchOperation);
            batchOperation = new TableBatchOperation();
        }
    }
    if (batchOperation.Count > 0)
    {
        await _feedItemsTable.ExecuteBatchAsync(batchOperation);
    }
}
/// <summary>
/// Copies the item's body text into <paramref name="newFeed"/>: prefers the full
/// text content, falls back to the summary when no content element is present.
/// </summary>
private static void ExtractFeedContent(SyndicationItem item, FeedEntity newFeed)
{
    if (item.Content is TextSyndicationContent textContent)
    {
        newFeed.Content = textContent.Text;
    }
    else if (item.Content == null)
    {
        // FIX: guard against items that carry neither content nor summary —
        // item.Summary.Text previously threw a NullReferenceException.
        newFeed.Content = item.Summary?.Text;
    }
    // Non-text content (e.g. Xml/Url syndication content) is deliberately ignored,
    // matching the original behavior.
}
/// <summary>
/// Runs a craigslist RSS search for one feed/category pair and returns the
/// matching feed items serialized as JSON.
/// </summary>
public JsonResult GetSearchResults(string guidString, string category, string searchterm, bool telecommute, bool contract)
{
    var feed = new FeedEntity(Guid.Parse(guidString));

    var list = new FeedListModel
    {
        Site = feed.Name,
        FeedUrl = feed.FeedUrl,
        Category = category,
        SearchTerm = searchterm,
        Telecommute = telecommute,
        Contract = contract
    };

    // Build the query-string flags for the craigslist search.
    var addOn = telecommute ? "is_telecommuting=1" : "is_telecommuting=0";
    if (contract)
    {
        addOn += "&is_contract=1";
    }

    // Normalize the base URL so the search path concatenates cleanly.
    if (!feed.FeedUrl.EndsWith("/"))
    {
        feed.FeedUrl += "/";
    }

    var url = string.Format("{0}search/{1}?{2}&query={3}&format=rss", feed.FeedUrl, category, addOn, searchterm);
    list.FeedItems = CraigslistReader.GetFeedItems(url);

    // NOTE(review): the items are serialized to a string here and then serialized
    // again by Json(), so the client receives a JSON-encoded JSON string —
    // presumably the client parses twice; confirm before simplifying.
    var jss = new JavaScriptSerializer();
    string output = jss.Serialize(list.FeedItems);
    return Json(output, JsonRequestBehavior.AllowGet);
}
/// <summary>
/// Returns the last feed in <paramref name="feeds"/> whose Name equals
/// <paramref name="name"/>, removing it from the list; null when no match exists.
/// </summary>
private FeedEntity FindAndRemove(List <FeedEntity> feeds, string name)
{
    // FindLast mirrors the original full scan, which kept overwriting the
    // result until the final match ("winrar").
    var match = feeds.FindLast(feedEntity => feedEntity.Name == name);
    if (match != null)
    {
        feeds.Remove(match);
    }
    return match;
}
/// <summary>
/// Updates (or creates) a feed item. Responds 400 on invalid model state;
/// otherwise 201 with a Location header pointing at the stored item.
/// </summary>
/// <param name="id">Route id of the item being put (not read directly here).</param>
/// <param name="item">Feed entity deserialized from the request body.</param>
public async Task Put(string id, [FromBody] FeedEntity item)
{
    if (!ModelState.IsValid)
    {
        HttpContext.Response.StatusCode = 400;
        return;
    }

    FeedManager cm = new FeedManager(Settings);
    // FIX: was res.Wait() — blocking on an async call risks thread-pool
    // starvation (and deadlock under a synchronization context); await instead.
    // Returning Task instead of void is backward-compatible for MVC dispatch.
    await cm.Save(item);

    string url = Url.RouteUrl("Get", new { id = item.Id }, Request.Scheme, Request.Host.ToUriComponent());
    HttpContext.Response.StatusCode = 201;
    HttpContext.Response.Headers["Location"] = url;
}
/// <summary>
/// Flattens one GTFS-realtime alert entity (typed-property model) into an
/// AlertData row, including its informed entities and active periods.
/// </summary>
/// <param name="feedMessage">Feed the entity came from; supplies header metadata.</param>
/// <param name="entity">Feed entity whose Alert is being flattened.</param>
/// <param name="translation">Chosen description translation (may be null).</param>
/// <param name="headerTranslation">Chosen header translation (may be null).</param>
private static AlertData GetAlert(FeedMessage feedMessage, FeedEntity entity, TranslatedString.Translation translation, TranslatedString.Translation headerTranslation)
{
    return new AlertData
    {
        AlertId = entity.Id,
        Cause = entity.Alert.cause.ToString(),
        DescriptionLanguage = translation?.Language,
        DescriptionText = translation?.Text,
        Effect = entity.Alert.effect.ToString(),
        GtfsRealtimeVersion = feedMessage.Header?.GtfsRealtimeVersion,
        // FIX: headerTranslation was dereferenced without a null check while the
        // description translation used ?. — made both null-safe and consistent.
        HeaderLanguage = headerTranslation?.Language,
        HeaderText = headerTranslation?.Text,
        HeaderTimestamp = feedMessage.Header?.Timestamp ?? 0,
        Incrementality = feedMessage.Header?.incrementality.ToString(),
        Url = entity.Alert.Url?.Translations.FirstOrDefault()?.Text,
        InformedEntities = entity.Alert.InformedEntities.Select(e => new AlertInformedEntityData
        {
            // FIX: Header guarded here too; the rest of this method treats it as nullable.
            HeaderTimestamp = feedMessage.Header?.Timestamp ?? 0,
            AlertId = entity.Id,
            AgencyId = e.AgencyId,
            RouteId = e.RouteId,
            RouteType = e.RouteType,
            StopId = e.StopId,
            TripId = e.Trip?.TripId
        })
        .ToList(),
        ActivePeriods = entity.Alert.ActivePeriods.Select(a => new AlertActivePeriodData
        {
            HeaderTimestamp = feedMessage.Header?.Timestamp ?? 0,
            AlertId = entity.Id,
            ActivePeriodEnd = a.End,
            ActivePeriodStart = a.Start
        })
        .ToList()
    };
}
/// <summary>
/// Flattens one GTFS-realtime alert entity (protobuf snake_case model) into an
/// AlertData row, including its informed entities and active periods.
/// </summary>
/// <param name="feedMessage">Feed the entity came from; supplies header metadata.</param>
/// <param name="entity">Feed entity whose alert is being flattened.</param>
/// <param name="translation">Chosen description translation (may be null).</param>
/// <param name="headerTranslation">Chosen header translation (may be null).</param>
private static AlertData GetAlert(FeedMessage feedMessage, FeedEntity entity, TranslatedString.Translation translation, TranslatedString.Translation headerTranslation)
{
    return new AlertData
    {
        AlertId = entity.id,
        Cause = entity.alert.cause.ToString(),
        DescriptionLanguage = translation?.language,
        DescriptionText = translation?.text,
        Effect = entity.alert.effect.ToString(),
        GtfsRealtimeVersion = feedMessage.header?.gtfs_realtime_version,
        // FIX: headerTranslation was dereferenced without a null check while the
        // description translation used ?. — made both null-safe and consistent.
        HeaderLanguage = headerTranslation?.language,
        HeaderText = headerTranslation?.text,
        HeaderTimestamp = feedMessage.header?.timestamp ?? 0,
        Incrementality = feedMessage.header?.incrementality.ToString(),
        Url = entity.alert.url?.translation.FirstOrDefault()?.text,
        InformedEntities = entity.alert.informed_entity.Select(e => new AlertInformedEntityData
        {
            // FIX: header guarded here too; the rest of this method treats it as nullable.
            HeaderTimestamp = feedMessage.header?.timestamp ?? 0,
            AlertId = entity.id,
            AgencyId = e.agency_id,
            RouteId = e.route_id,
            RouteType = e.route_type,
            StopId = e.stop_id,
            TripId = e.trip?.trip_id
        })
        .ToList(),
        ActivePeriods = entity.alert.active_period.Select(a => new AlertActivePeriodData
        {
            HeaderTimestamp = feedMessage.header?.timestamp ?? 0,
            AlertId = entity.id,
            ActivePeriodEnd = a.end,
            ActivePeriodStart = a.start
        })
        .ToList()
    };
}
/// <summary>
/// Runs a craigslist RSS search for every selected feed/category pair and
/// renders the combined results.
/// </summary>
public ActionResult SearchResults(string feeds, string categories, string searchterm, bool telecommute = false, bool contract = false)
{
    var model = new SearchResultsModel();

    foreach (var guidString in feeds.Split(','))
    {
        foreach (var category in categories.Split(','))
        {
            var feed = new FeedEntity(Guid.Parse(guidString));

            var list = new FeedListModel
            {
                Site = feed.Name,
                FeedUrl = feed.FeedUrl,
                Category = category,
                SearchTerm = searchterm,
                Telecommute = telecommute,
                Contract = contract
            };
            model.Results.Add(list);

            // Build the query-string flags for the craigslist search.
            var addOn = telecommute ? "is_telecommuting=1" : "is_telecommuting=0";
            if (contract)
            {
                addOn += "&is_contract=1";
            }

            var url = string.Format("{0}search/{1}?{2}&query={3}&format=rss", feed.FeedUrl, category, addOn, searchterm);
            list.FeedItems = CraigslistReader.GetFeedItems(url);

            //TODO: Add code to prevent duplicate emails getting sent — collect the
            //ids/addresses of responses already sent for this feed (previously
            //sketched here via a ResponseCollection filtered on FeedGuid) so the
            //same listing is never answered twice.
        }
    }

    return View(model);
}
/// <summary>
/// Builds a GTFS-realtime VehiclePosition feed from positions recorded inside a
/// configured time window and uploads the serialized protobuf to the "gtfsrt"
/// Azure file share as vehicleposition.pb.
/// </summary>
static void Main(string[] args)
{
    // Window start is now + TimeStampOffset minutes — presumably a negative
    // offset so the window reaches back in time; TODO confirm in config.
    int tsOffset = Convert.ToInt32(ConfigurationManager.AppSettings.Get("TimeStampOffset"));
    long timestamp_from = (long)Functions.ToEpoch(DateTime.UtcNow.AddMinutes(tsOffset));
    long timestamp_to = (long)Functions.ToEpoch(DateTime.UtcNow);
    List <RTVehiclePositions> vehiclePositions = db.Database
        .SqlQuery <RTVehiclePositions>("RTVehiclePositionsGetActive @timestamp_from, @timestamp_to"
        , new SqlParameter("@timestamp_from", timestamp_from)
        , new SqlParameter("@timestamp_to", timestamp_to))
        .ToList();
    FeedMessage feed = new FeedMessage();
    if (vehiclePositions.Count > 0)
    {
        foreach (RTVehiclePositions vehiclePosition in vehiclePositions)
        {
            // Trip the vehicle is currently serving.
            TripDescriptor td = new TripDescriptor();
            td.TripId = vehiclePosition.trip_id;
            td.RouteId = vehiclePosition.route_id;
            td.DirectionId = (uint)vehiclePosition.direction_id;
            td.StartDate = vehiclePosition.start_date;
            td.StartTime = vehiclePosition.start_time;
            // Last reported coordinates.
            Position pos = new Position();
            pos.Latitude = (float)vehiclePosition.latitude;
            pos.Longitude = (float)vehiclePosition.longitude;
            // Identity of the reporting vehicle.
            VehicleDescriptor v = new VehicleDescriptor();
            v.Id = vehiclePosition.vehicle_id;
            v.Label = vehiclePosition.vehicle_label;
            v.LicensePlate = vehiclePosition.vehicle_license_plate;
            VehiclePosition vp = new VehiclePosition();
            vp.Position = pos;
            vp.Trip = td;
            vp.Vehicle = v;
            vp.CurrentStopSequence = (uint)vehiclePosition.current_stop_sequence;
            vp.StopId = vehiclePosition.stop_id;
            vp.Timestamp = (ulong)vehiclePosition.timestamp;
            FeedEntity entity = new FeedEntity();
            entity.Id = vehiclePosition.id.ToString();
            entity.Vehicle = vp;
            feed.Entities.Add(entity);
        }
        byte[] objSerialized = Functions.ProtoSerialize(feed);
        CloudStorageAccount storageAccount = CloudStorageAccount.Parse(CloudConfigurationManager.GetSetting("StorageConnectionString"));
        string filename = "vehicleposition.pb";
        // Create a CloudFileClient object for credentialed access to Azure Files.
        CloudFileClient fileClient = storageAccount.CreateCloudFileClient();
        // Get a reference to the file share we created previously.
        CloudFileShare share = fileClient.GetShareReference("gtfsrt");
        share.CreateIfNotExists();
        var rootDir = share.GetRootDirectoryReference();
        using (var stream = new MemoryStream(objSerialized, writable: false))
        {
            // Overwrites the previous vehicleposition.pb in the share root.
            rootDir.GetFileReference(filename).UploadFromStream(stream);
        }
    }
}
/// <summary>
/// Builds a GTFS-realtime Alert feed from the service alerts active on the
/// current Philippine travel date and uploads the serialized protobuf to the
/// "gtfsrt" Azure file share as alert.pb.
/// </summary>
static void Main(string[] args)
{
    // Philippine local time is UTC+8; the stored procedure filters by this date.
    SqlParameter parameter = new SqlParameter();
    parameter.SqlDbType = System.Data.SqlDbType.DateTime;
    parameter.ParameterName = "@travel_date";
    parameter.Value = DateTime.UtcNow.AddHours(8);
    List <RTServiceAlerts> serviceAlerts = db.Database
        .SqlQuery <RTServiceAlerts>("RTServiceAlertsGetActive @travel_date"
        , parameter)
        .ToList();
    FeedMessage feed = new FeedMessage();
    if (serviceAlerts.Count > 0)
    {
        foreach (RTServiceAlerts serviceAlert in serviceAlerts)
        {
            // What the alert applies to (route and/or stop).
            EntitySelector entitySel = new EntitySelector();
            entitySel.RouteId = serviceAlert.route_id;
            entitySel.StopId = serviceAlert.stop_id;
            Alert alert = new Alert();
            // FIX: header and description were swapped — HeaderText was fed the
            // description column and DescriptionText the header column.
            alert.HeaderText = Functions.GenerateTranslatedString(serviceAlert.header);
            alert.DescriptionText = Functions.GenerateTranslatedString(serviceAlert.description);
            alert.InformedEntities.Add(entitySel);
            //alert.Url = GenerateTranslatedString(serviceAlert.id.ToString());
            // Stored dates are PH-local (UTC+8); shift back to UTC before epoch conversion.
            TimeRange tr = new TimeRange();
            tr.Start = Functions.ToEpoch(serviceAlert.start_date.AddHours(-8));
            tr.End = Functions.ToEpoch(serviceAlert.end_date.AddHours(-8));
            alert.ActivePeriods.Add(tr);
            FeedEntity entity = new FeedEntity();
            entity.Alert = alert;
            entity.Id = serviceAlert.id.ToString();
            feed.Entities.Add(entity);
        }
        byte[] objSerialized = Functions.ProtoSerialize(feed);
        CloudStorageAccount storageAccount = CloudStorageAccount.Parse(CloudConfigurationManager.GetSetting("StorageConnectionString"));
        string filename = "alert.pb";
        // Create a CloudFileClient object for credentialed access to Azure Files.
        CloudFileClient fileClient = storageAccount.CreateCloudFileClient();
        // Get a reference to the file share we created previously.
        CloudFileShare share = fileClient.GetShareReference("gtfsrt");
        share.CreateIfNotExists();
        var rootDir = share.GetRootDirectoryReference();
        using (var stream = new MemoryStream(objSerialized, writable: false))
        {
            // Overwrites the previous alert.pb in the share root.
            rootDir.GetFileReference(filename).UploadFromStream(stream);
        }
    }
    //byte[] objSerialized = Functions.ProtoSerialize(feed);
    //CloudStorageAccount storageAccount = CloudStorageAccount.Parse(CloudConfigurationManager.GetSetting("StorageConnectionString"));
    //string filename = "alert.pb";
    //Functions.CreatePBFile(storageAccount, filename, objSerialized);
}
/// <summary>
/// Crawls a CSDN blog's article list pages and returns one FeedEntity per
/// article, including its content, categories, tags and publish time.
/// </summary>
public List<FeedEntity> GetEntity()
{
    List<FeedEntity> feeds = new List<FeedEntity>();
    // Parse the first page to learn how many article-list pages exist.
    httpHelper.URL = string.Format(this.pageURL, 1);
    string html=httpHelper.GetHTML();
    // Extract the page count — the pattern matches "共N页" ("N pages in total").
    // NOTE(review): throws if the pattern is absent; assumes the list page always shows it.
    match = Regex.Match(html, @"共(\d+)页");
    int pageCount = int.Parse(match.Groups[1].Value);
    // Walk every list page.
    for(int i = 1; i <= pageCount; i++)
    {
        // Fetch this page's article list.
        httpHelper.URL = string.Format(this.pageURL, i);
        html = httpHelper.GetHTML();
        // Process each article entry on the page.
        MatchCollection matches=regexTitle.Matches(html);
        foreach (Match m in matches)
        {
            FeedEntity feed = new FeedEntity();
            // Article title and URL; '/' in titles is replaced to keep them path-safe.
            feed.Title = m.Groups[2].Value.Trim().Replace("/","_");
            feed.Url = "http://blog.csdn.net/" + m.Groups[1].Value;
            Console.WriteLine("正在抓取文章:" + feed.Title); // "Fetching article: <title>"
            // Fetch the article page itself for content, category, tags and publish time.
            httpHelper.URL = feed.Url;
            html = httpHelper.GetHTML();
            feed.Time = GetTime(html);
            feed.Categoryies = GetCategory(html);
            feed.Tags = GetTags(html);
            feed.Content = GetContent(html);
            feeds.Add(feed);
        }
    }
    return feeds;
}
/// <summary>
/// Refreshes the feed list of every source popped from <paramref name="input"/>.
/// Sources whose enum value is 20–40 (Tamedia-style) are rebuilt from the
/// publisher's navigation endpoint; the 20min source is rebuilt from its sitemap
/// feed. Existing FeedEntity instances are reused (matched by name) so their
/// Guids are preserved across refreshes.
/// </summary>
private async Task EvaluateSources(ConcurrentStack <SourceEntity> input)
{
    try
    {
        while (input.TryPop(out var source))
        {
            if ((int)source.Source >= 20 && (int)source.Source <= 40)
            {
                //tamedia sources
                var resp = await _httpService.DownloadAsync(new Uri(source.LogicBaseUrl + "navigations?client=webapp"));
                var json = await resp.GetResponseAsStringAsync();
                var model = TamediaNavigation.FromJson(json);
                var existing = source.Feeds.ToList();
                var newList = new List <FeedEntity>();
                foreach (var navigation in model.Navigations)
                {
                    if (navigation.CategoryPreview != null)
                    {
                        // Reuse the existing feed with this name, or create a fresh one.
                        var found = FindAndRemove(existing, navigation.CategoryPreview.Name) ?? new FeedEntity()
                        {
                            Guid = Guid.NewGuid(),
                            Name = navigation.CategoryPreview.Name
                        };
                        //correct category
                        found.Url = "categories/" + navigation.CategoryPreview.Id;
                        newList.Add(found);
                    }
                }
                //skip adding of front because currently it cannot be processed
                if (false)
                {
                    //add / correct special front navigation
                    var front = FindAndRemove(existing, "Front") ?? new FeedEntity()
                    {
                        Guid = Guid.NewGuid(),
                        Name = "Front"
                    };
                    //correct category
                    front.Url = "fronts/mobile";
                    newList.Insert(0, front);
                }
                //to output: replace the source's feeds with the rebuilt list.
                source.Feeds.Clear();
                foreach (var feedEntity in newList)
                {
                    source.Feeds.Add(feedEntity);
                }
            }
            else if (source.Source == Sources.ZwanzigMin)
            {
                var resp = await _httpService.DownloadAsync(new Uri("http://api.20min.ch/feed/sitemap?&key=" + TwentyMinCustomerKey + "&json&host=m.20min.ch&lang=de"));
                var json = await resp.GetResponseAsStringAsync();
                // Index the current feeds by name so existing entries can be reused.
                var feedDic = new Dictionary <string, FeedEntity>();
                foreach (var sourceFeed in source.Feeds)
                {
                    feedDic.Add(sourceFeed.Name, sourceFeed);
                }
                source.Feeds.Clear();
                source.LogicBaseUrl = "http://api.20min.ch/feed";
                var logicLength = source.LogicBaseUrl.Length;
                var model = GettingStarted.FromJson(json);
                // Only categorized "view" items become feeds.
                foreach (var contentItem in model.Content.Items.Item.Where(c => !string.IsNullOrEmpty(c.Category) && c.Type == "view"))
                {
                    FeedEntity item = null;
                    if (feedDic.ContainsKey(contentItem.Category))
                    {
                        item = feedDic[contentItem.Category];
                    }
                    else
                    {
                        item = new FeedEntity
                        {
                            Name = contentItem.Category,
                            Guid = Guid.NewGuid()
                        };
                    }
                    // Store the feed path relative to the logic base URL, with the
                    // customer key masked by a placeholder.
                    item.Url = contentItem.FeedFullContentUrl.Substring(logicLength).Replace(TwentyMinCustomerKey, "CUSTOMERKEY");
                    source.Feeds.Add(item);
                }
            }
        }
    }
    catch (Exception e)
    {
        // NOTE(review): catch-all logs to console and swallows; a failure leaves
        // the remaining stack entries unprocessed — confirm this is intended.
        Console.WriteLine(e);
    }
}