Example #1
        /// <summary>
        /// Initializes a new instance of the FeedActionResult class
        /// </summary>
        /// <param name="blogName">Name of the blog</param>
        /// <param name="description">Feed description</param>
        /// <param name="format">Format of the feed</param>
        /// <param name="url">A URL Helper</param>
        /// <param name="posts">The posts to include in the feed</param>
        public FeedActionResult(string blogName, string description, FeedFormat format, UrlHelper url, IEnumerable<BlogPost> posts)
        {
            Guid blogPostId;
            string postRelative;
            SyndicationItem item;
            List<SyndicationItem> items = new List<SyndicationItem>();

            // Specify the type of feed
            Format = format;

            // Initialize the current feed
            Feed = new SyndicationFeed(blogName, description, new Uri(url.RouteUrl("Default"), UriKind.Relative));

            // Load the posts as items
            foreach (BlogPost post in posts)
            {
                blogPostId = post.BlogPostId;
                postRelative = url.Action(
                    "Details", "Posts",
                    new
                    {
                        year = post.PostedDate.Value.Year,
                        month = post.PostedDate.Value.Month,
                        day = post.PostedDate.Value.Day,
                        id = blogPostId
                    });

                item = new SyndicationItem(post.Title, post.Post,
                    new Uri(postRelative, UriKind.Relative), post.BlogPostId.ToString(), post.PostedDate.Value);

                items.Add(item);
            }

            Feed.Items = items.OrderByDescending(x => x.LastUpdatedTime);
        }
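
The constructor above only assembles the SyndicationFeed; writing it to the response happens in the ExecuteResult half of the action result, which the example does not show. A minimal sketch of that half, assuming Feed and Format are properties of the class and that FeedFormat exposes Rss/Atom members as in the later examples:

        // Hypothetical ExecuteResult override (not part of the original snippet):
        // selects the built-in formatter that matches Format and writes Feed
        // to the HTTP response.
        public override void ExecuteResult(ControllerContext context)
        {
            HttpResponseBase response = context.HttpContext.Response;
            response.ContentType = (Format == FeedFormat.Atom) ? "application/atom+xml" : "application/rss+xml";

            using (XmlWriter writer = XmlWriter.Create(response.Output))
            {
                SyndicationFeedFormatter formatter = (Format == FeedFormat.Atom)
                    ? (SyndicationFeedFormatter)new Atom10FeedFormatter(Feed)
                    : new Rss20FeedFormatter(Feed);
                formatter.WriteTo(writer);
            }
        }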
Example #2
 private static bool TryUpdateFeedWithNewToolsAndTemplates(JObject feed, FeedFormat format, CoreToolsInfo coreToolsInfo)
 {
     try
     {
          // Get a cloned object so that the existing release is not modified
          JObject currentReleaseEntryJson = GetCurrentReleaseEntry(feed, coreToolsInfo.MajorVersion).DeepClone() as JObject;
          JObject newReleaseEntryJson     = GetNewReleaseEntryJson(currentReleaseEntryJson, format, coreToolsInfo);
          return TryAddNewReleaseToFeed(feed, newReleaseEntryJson, coreToolsInfo.MajorVersion);
     }
     catch
     {
          return false;
     }
 }
Example #3
        public static IFeedEntryUpdater GetFeedEntryUpdater(FeedFormat format)
        {
            switch (format)
            {
            case FeedFormat.V3:
                return new V3FormatFeedEntryUpdater();

            case FeedFormat.V4:
                return new V4FormatFeedEntryUpdater();

            default:
                throw new InvalidOperationException($"Unidentified feed format '{format}'");
            }
        }
Example #4
 public static BaseFeedActionResult GetFeedActionResult(FeedFormat format)
 {
     switch (format)
     {
          case FeedFormat.Rss:
              return new RssActionResult();

          case FeedFormat.Atom:
              return new AtomActionResult();
         default:
             throw new ArgumentOutOfRangeException("format");
     }
 }
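
Because every case either returns or throws, the switch needs no break statements. On a C# 8+ compiler the same factory can also be written as a switch expression, which makes unreachable trailing statements impossible to express; a sketch reusing the same types:

 public static BaseFeedActionResult GetFeedActionResult(FeedFormat format) =>
     format switch
     {
         // One arm is cast so all arms share BaseFeedActionResult as their common type.
         FeedFormat.Rss  => (BaseFeedActionResult)new RssActionResult(),
         FeedFormat.Atom => new AtomActionResult(),
         _               => throw new ArgumentOutOfRangeException(nameof(format))
     };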
Example #5
        public SyndicationFeedFormatter CreateFeed(FeedFormat format, UrlHelper urlHelper)
        {
            var items =
                ListPictures(urlHelper)
                .Union(
                    ListPosts(urlHelper))
                .OrderByDescending(item => item.PublishDate);

            var feed = new SyndicationFeed("Molimentum", "lat.: Bemühung, große Anstrengung", new Uri("http://molimentum.at"))
            {
                Items = items
            };

            return GetFormatter(format, feed);
        }
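
CreateFeed hands the format decision to a GetFormatter helper that the snippet does not include. A plausible sketch of it, assuming Rss/Atom enum members and the standard System.ServiceModel.Syndication formatters:

        // Hypothetical helper: maps FeedFormat onto the framework's feed formatters.
        private static SyndicationFeedFormatter GetFormatter(FeedFormat format, SyndicationFeed feed)
        {
            switch (format)
            {
                case FeedFormat.Rss:
                    return new Rss20FeedFormatter(feed);

                case FeedFormat.Atom:
                    return new Atom10FeedFormatter(feed);

                default:
                    throw new ArgumentOutOfRangeException(nameof(format));
            }
        }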
Example #6
        public static void GenerateNewFeed(string feedName, CoreToolsInfo coreToolsInfo)
        {
            Console.WriteLine();
            Console.WriteLine($"Preparing CLI feed for version: '{coreToolsInfo.Version}' for feed: '{feedName}'");

            JObject    feedJson = GetFeedJSON(feedName);
            FeedFormat format   = _feedNameToFormat[feedName];

            if (TryUpdateFeedWithNewToolsAndTemplates(feedJson, format, coreToolsInfo))
            {
                string path = Path.Combine(coreToolsInfo.ArtifactsDirectory, feedName);
                WriteToJsonFile(feedJson, path);
            }
            else
            {
                Console.WriteLine($"WARNING: No existing entries found for version {coreToolsInfo.MajorVersion} in {feedName}. You may have to manually add a version before this tool will work. Skipping this feed.");
            }
        }
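
GenerateNewFeed resolves the format from the feed's name through a _feedNameToFormat map defined elsewhere in the class. Its shape is presumably along these lines; the file names below are illustrative placeholders, not taken from the source:

        // Hypothetical lookup table; only its existence is implied by the snippet.
        private static readonly Dictionary<string, FeedFormat> _feedNameToFormat =
            new Dictionary<string, FeedFormat>
            {
                ["cli-feed-v3.json"] = FeedFormat.V3,
                ["cli-feed-v4.json"] = FeedFormat.V4
            };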
Example #7
        private Tuplet<MimeType, XDoc> MakeNewsFeed(IEnumerable<RecentChangeEntry> recentchanges, XUri feedUri, string feedTitle, FeedFormat format, DateTime since) {
            var resources = DekiContext.Current.Resources;
            var changes = new List<RecentChangeEntry>();
            DekiContext deki = DekiContext.Current;
            bool diffCacheEnabled = deki.Instance.RecentChangesDiffCaching;

            // check if we need to merge change entries
            MimeType mime = MimeType.XML;
            if((format == FeedFormat.ATOM_DAILY) || (format == FeedFormat.RAW_DAILY)) {

                // combine changes that occurred on the same day
                Dictionary<string, DigestLookupEntry> pageLookup = new Dictionary<string, DigestLookupEntry>();
                Dictionary<string, DigestLookupEntry> commentLookup = new Dictionary<string, DigestLookupEntry>();
                Dictionary<string, ulong> commentDescriptToCommentLookup = new Dictionary<string, ulong>();
                List<Dictionary<string, KeyValuePair<string, int>>> authors = new List<Dictionary<string, KeyValuePair<string, int>>>();
                int index = 0;
                foreach(var change in recentchanges) {
                    ulong pageId = change.CurId;
                    if(pageId == 0) {

                        // should never happen, but if it does, just ignore this entry
                        continue;
                    }
                    DateTime timestamp = change.Timestamp;
                    NS ns = change.Namespace;
                    RC type = change.Type;
                    string author = change.Username;
                    string fullname = change.Fullname ?? change.Username;

                    // check if we are processing a comment or page change
                    if(Utils.IsPageComment(type)) {
                        ulong commentId = change.CmntId ?? 0;
                        string comment = change.Comment;
                        if(commentId == 0) {

                            // NOTE (steveb): because the recentchanges table is brain dead, we sometimes cannot associate a comment change with the comment that affected it;
                            //                luckily, when that happens, there is a good chance that the description for the change is the same as an earlier one;
                            //                so all we need to do is to lookup the previous change using the current change description.

                            if(!commentDescriptToCommentLookup.TryGetValue(comment ?? string.Empty, out commentId)) {
                                continue;
                            }
                        } else if(comment != null) {
                            commentDescriptToCommentLookup[comment] = commentId;
                        }

                        // remove revision number (not applicable)
                        change.Revision = 0;

                        // check if we need to merge this change with a previous one
                        DigestLookupEntry entry;
                        string key = string.Format("{0}-{1}", commentId, timestamp.DayOfYear);
                        if(commentLookup.TryGetValue(key, out entry)) {
                            var item = changes[entry.Index];
                            ++item.EditCount;

                            // append the change comments
                            if(item.ExtraComments == null) {
                                item.ExtraComments = new List<Tuplet<string, string, string>>();

                                // first add the existing comment to the list
                                item.ExtraComments.Add(new Tuplet<string, string, string>(item.Username, item.Fullname, item.Comment));
                            }
                            item.ExtraComments.Add(new Tuplet<string, string, string>(change.Username, change.Fullname, change.Comment));

                            // update edit count for author
                            KeyValuePair<string, int> authorEdits;
                            authors[entry.Index].TryGetValue(author, out authorEdits);
                            authors[entry.Index][author] = new KeyValuePair<string, int>(fullname, authorEdits.Value + 1);
                        } else {
                            change.EditCount = 1;

                            // NOTE (steveb): we always create the lookup to create a discontinuity with previous changes on the same page;
                            //                this causes ungroupable changes (e.g. MOVE) to split groupable changes, preventing
                            //                groupable changes that are no longer continuous from being improperly grouped.

                            // create a new entry, either because this page has no existing entry yet, or the change cannot be grouped with other changes
                            commentLookup[key] = new DigestLookupEntry(timestamp, index, type);
                            authors.Add(new Dictionary<string, KeyValuePair<string, int>>());
                            authors[authors.Count - 1].Add(author, new KeyValuePair<string, int>(fullname, 1));

                            changes.Add(change);
                            ++index;
                        }
                    } else {

                        // add a default edit count
                        if(change.EditCount == 0) {
                            change.EditCount = Utils.IsPageEdit(type) ? 1 : 0;
                        }

                        // check if we need to merge this change with a previous one
                        DigestLookupEntry entry;
                        string key = string.Format("{0}-{1}-{2}", ns, pageId, timestamp.DayOfYear);
                        if(pageLookup.TryGetValue(key, out entry) && Utils.IsPageModification(type) && Utils.IsPageModification(entry.Type)) {
                            var item = changes[entry.Index];

                            // update 'rc_last_oldid' to reflect the older page id of the combined records
                            if(Utils.IsPageEdit(type)) {
                                item.LastOldId = change.LastOldId;
                                item.EditCount = item.EditCount + 1;
                                if(change.Revision != 0) {
                                    item.PreviousRevision = change.Revision - 1;
                                }
                            }

                            // append the change comments
                            if(item.ExtraComments == null) {
                                item.ExtraComments = new List<Tuplet<string, string, string>>();

                                // first add the existing comment to the list
                                item.ExtraComments.Add(new Tuplet<string, string, string>(item.Username, item.Fullname, item.Comment));
                            }
                            item.ExtraComments.Add(new Tuplet<string, string, string>(change.Username, change.Fullname, change.Comment));

                            // update edit count for author
                            KeyValuePair<string, int> authorEdits;
                            authors[entry.Index].TryGetValue(author, out authorEdits);
                            authors[entry.Index][author] = new KeyValuePair<string, int>(fullname, authorEdits.Value + 1);
                        } else {

                            // NOTE (steveb): we always create the lookup to create a discontinuity with previous changes on the same page;
                            //                this causes ungroupable changes (e.g. MOVE) to split groupable changes, preventing
                            //                groupable changes that are no longer continuous from being improperly grouped.

                            // create a new entry, either because this page has no existing entry yet, or the change cannot be grouped with other changes
                            pageLookup[key] = new DigestLookupEntry(timestamp, index, type);
                            authors.Add(new Dictionary<string, KeyValuePair<string, int>>());
                            authors[authors.Count - 1].Add(author, new KeyValuePair<string, int>(fullname, 1));

                            // check if page was changed
                            if(Utils.IsPageEdit(type)) {

                                // update previous revision number
                                change.PreviousRevision = change.Revision - 1;
                            } else if(Utils.IsPageModification(type)) {

                                // set previous revision number
                                change.PreviousRevision = change.Revision;
                            }
                            changes.Add(change);
                            ++index;
                        }
                    }
                }

                // create list of authors as comment line
                for(int i = 0; i < changes.Count; ++i) {
                    var change = changes[i];

                    // create an array of (fullname, username) author names
                    var sortedAuthors = (from author in authors[i] select new KeyValuePair<string, string>(author.Key, author.Value.Key)).ToList();
                    sortedAuthors.Sort((x, y) => StringComparer.OrdinalIgnoreCase.Compare(x.Value, y.Value));
                    string authorList = Utils.LinguisticJoin(from author in sortedAuthors select (string.IsNullOrEmpty(author.Value) ? author.Key : author.Value), resources.Localize(DekiResources.AND()));

                    // add-up all edit operations
                    int editTotal = 0;
                    foreach(KeyValuePair<string, int> edits in authors[i].Values) {
                        editTotal += edits.Value;
                    }

                    // reset comment for standard edits
                    RC type = change.Type;
                    if(Utils.IsPageModification(type) || Utils.IsPageComment(type)) {
                        string summary = null;
                        switch(editTotal) {
                        case 2:
                            summary = resources.Localize(DekiResources.EDIT_SUMMARY_TWO(authorList, editTotal));
                            break;
                        case 1:
                            summary = resources.Localize(DekiResources.EDIT_SUMMARY_ONE(authorList, editTotal));
                            break;
                        case 0:
                            break;
                        default:
                            summary = resources.Localize(DekiResources.EDIT_SUMMARY_MANY(authorList, editTotal));
                            break;
                        }
                        change.Summary = summary;
                    }

                    // reflect that multiple authors edited article, if appropriate
                    change.SortedAuthors = sortedAuthors;
                }

                // check if only the digest format was requested
                if(format == FeedFormat.RAW_DAILY) {
                    XDoc digest = new XDoc("digest");
                    foreach(var change in changes) {
                        change.AppendXml(digest);
                    }
                    return new Tuplet<MimeType, XDoc>(mime, digest);
                }
            } else if(format == FeedFormat.ATOM_ALL) {

                // keep all changes
                foreach(var change in recentchanges) {
                    if(Utils.IsPageEdit(change.Type)) {
                        change.PreviousRevision = change.Revision - 1;
                    } else {
                        change.Revision = 0;
                    }
                    changes.Add(change);
                }
            } else if(format == FeedFormat.DAILY_ACTIVITY) {

                // need to establish how many pages and users exist in total
                var pagesTotal = (int)DbUtils.CurrentSession.Pages_GetCount();
                var usersTotal = (int)DbUtils.CurrentSession.Users_GetCount();

                // daily activity format
                XDoc table = new XDoc("activity").Attr("type", "daily");
                DateTime missing = DateTime.UtcNow.Date;
                foreach(var change in from recentchange in recentchanges
                                      where (recentchange.Namespace == NS.MAIN) || (recentchange.Namespace == NS.USER)
                                      group recentchange by recentchange.Timestamp.Date into recentchangesByDate
                                      select new {
                                          Date = recentchangesByDate.Key,

                                          // count as edited pages, pages that were not created or deleted the same day
                                          PagesEdited = recentchangesByDate.Where(rc => (rc.Type == RC.EDIT) && !recentchangesByDate.Any(rc2 => (rc.CurId == rc2.CurId) && ((rc2.Type == RC.NEW) || (rc2.Type == RC.PAGERESTORED) || (rc2.Type == RC.PAGEDELETED)))).Distinct(rc => rc.CurId).Count(),

                                          // count as created pages, pages that were not deleted later the same day
                                          PagesCreated = recentchangesByDate.Count(rc => ((rc.Type == RC.NEW) || (rc.Type == RC.PAGERESTORED)) && !recentchangesByDate.Any(rc2 => (rc2.CurId == rc.CurId) && (rc2.Id < rc.Id) && (rc2.Type == RC.PAGEDELETED))),

                                          // count as deleted pages, pages that were not created or restored earlier the same day
                                          PagesDeleted = recentchangesByDate.Count(rc => (rc.Type == RC.PAGEDELETED) && !recentchangesByDate.Any(rc2 => (rc.CurId == rc2.CurId) && (rc2.Id > rc.Id) && ((rc2.Type == RC.NEW) || (rc2.Type == RC.PAGERESTORED)))),

                                          // simple counting of created users
                                          UsersCreated = recentchangesByDate.Count(rc => rc.Type == RC.USER_CREATED)
                                      }
                ) {

                    // check if we need to add empty entries for missing days
                    for(; missing > change.Date; missing = missing.AddDays(-1)) {
                        table.Start("entry").Attr("date", missing)
                            .Elem("pages.total", pagesTotal)
                            .Elem("pages.created", 0)
                            .Elem("pages.edited", 0)
                            .Elem("pages.deleted", 0)
                            .Elem("users.total", usersTotal)
                            .Elem("users.created", 0)
                        .End();
                    }

                    // add this day's entry
                    table.Start("entry").Attr("date", change.Date)
                        .Elem("pages.total", pagesTotal)
                        .Elem("pages.created", change.PagesCreated)
                        .Elem("pages.edited", change.PagesEdited)
                        .Elem("pages.deleted", change.PagesDeleted)
                        .Elem("users.total", usersTotal)
                        .Elem("users.created", change.UsersCreated)
                    .End();
                    
                    // NOTE (steveb): pages total might become negative if a created user didn't actually get a user page
                    pagesTotal -= change.PagesCreated - change.PagesDeleted + change.UsersCreated;
                    usersTotal -= change.UsersCreated;

                    // indicate that current is *not* missing
                    missing = change.Date.AddDays(-1);
                }

                // pad with missing records
                for(; missing >= since; missing = missing.AddDays(-1)) {
                    table.Start("entry").Attr("date", missing)
                        .Elem("pages.total", pagesTotal)
                        .Elem("pages.created", 0)
                        .Elem("pages.edited", 0)
                        .Elem("pages.deleted", 0)
                        .Elem("users.total", usersTotal)
                        .Elem("users.created", 0)
                    .End();
                }
                return new Tuplet<MimeType, XDoc>(mime, table);
            } else {

                // unknown or RAW format
                XDoc table = new XDoc("table");
                foreach(var change in recentchanges) {
                    change.AppendXml(table);
                }
                return new Tuplet<MimeType, XDoc>(mime, table);
            }

            // compose feed document
            mime = MimeType.ATOM;
            XAtomFeed feed = new XAtomFeed(feedTitle, feedUri, DateTime.UtcNow) { Language = deki.Instance.SiteLanguage, Id = feedUri };
            Dictionary<string, XDoc> cache = new Dictionary<string, XDoc>();
            foreach(var change in changes) {
                RC type = change.Type;
                if(Utils.IsPageHiddenOperation(type)) {

                    // no real content to produce; let's skip it
                    continue;
                }

                // build feed content
                Title title = Title.FromDbPath(change.Namespace, change.Title, null);
                XDoc description = new XDoc("div");
                AppendDiff(diffCacheEnabled, description, change, type, title, cache);

                // add item to feed
                try {
                    DateTime timestamp = change.Timestamp;
                    XAtomEntry entry = feed.StartEntry(title.AsPrefixedUserFriendlyPath(), timestamp, timestamp);
                    XUri id = XUri.TryParse(Utils.AsPublicUiUri(title));
                    if(id != null) {
                        if(id.Segments.Length == 0) {
                            id = id.WithTrailingSlash();
                        }
                        entry.Id = id.WithFragment(DbUtils.ToString(change.Timestamp));
                    }
                    entry.AddAuthor(((change.SortedAuthors == null) || (change.SortedAuthors.Count == 1)) ? (string.IsNullOrEmpty(change.Fullname) ? change.Username : change.Fullname) : resources.Localize(DekiResources.EDIT_MULTIPLE()), null, null);
                    entry.AddLink(new XUri(Utils.AsPublicUiUri(title)), XAtomBase.LinkRelation.Alternate, null, null, null);
                    entry.AddSummary(MimeType.XHTML, description);
                    feed.End();
                } catch(Exception e) {
                    _log.ErrorExceptionMethodCall(e, "MakeNewsFeed", title.AsPrefixedDbPath());
                }
            }

            // insert <ins> styles
            foreach(XDoc ins in feed[".//ins"]) {
                ins.Attr("style", "color: #009900;background-color: #ccffcc;text-decoration: none;");
            }

            // insert <del> styles
            foreach(XDoc del in feed[".//del"]) {
                del.Attr("style", "color: #990000;background-color: #ffcccc;text-decoration: none;");
            }
            return new Tuplet<MimeType, XDoc>(mime, feed);
        }
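
In short, MakeNewsFeed produces four different documents from one change stream. For ATOM_DAILY and RAW_DAILY it first merges same-day changes per page (and per comment), and RAW_DAILY returns that merged digest immediately; ATOM_ALL keeps every change; DAILY_ACTIVITY returns a per-day activity table; RAW or any unrecognized format returns the plain change table. Only the two ATOM formats reach the feed composition at the end, which is why mime can be set to MimeType.ATOM unconditionally there.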
Example #8
        private Tuplet<MimeType, XDoc> MakeNewsFeedCached(Func<IEnumerable<RecentChangeEntry>> recentchanges, XUri feedUri, string feedTitle, string feedName, List<string> feedNameSuffixes, FeedFormat format, DateTime since) {
            DekiContext deki = DekiContext.Current;
            TimeSpan feedCacheTtl = deki.Instance.RecentChangesFeedCachingTtl;

            // cache the feed if caching is enabled, if an ATOM format is requested, and the user is not logged in
            if((feedCacheTtl > TimeSpan.Zero) && ((format == FeedFormat.ATOM_ALL) || (format == FeedFormat.ATOM_DAILY)) && UserBL.IsAnonymous(deki.User)) {

                // compute complete feed name
                if(feedNameSuffixes.Count > 0) {
                    feedName += "(" + string.Join(",", feedNameSuffixes.ToArray()) + ")";
                }
                feedName += ".xml";

                // check if there is a cached version of the feed
                Plug store = Storage.At("site_" + XUri.EncodeSegment(DekiContext.Current.Instance.Id), DreamContext.Current.Culture.Name, "users", string.Format("user_{0}", DekiContext.Current.User.ID), feedName);
                var v = store.Get(new Result<DreamMessage>(TimeSpan.MaxValue)).Wait();
                XDoc cachedFeed = (v.IsSuccessful && v.HasDocument) ? v.ToDocument() : null;
                if(cachedFeed != null) {

                    // let's validate the timestamp on the feed as well (just in case the cache storage didn't remove the item)
                    DateTime now = DateTime.UtcNow;
                    DateTime updated = cachedFeed["_:updated"].AsDate ?? now;
                    if(now.Subtract(updated) < feedCacheTtl) {
                        return new Tuplet<MimeType, XDoc>(MimeType.ATOM, cachedFeed);
                    }
                }
                var result = MakeNewsFeed(recentchanges(), feedUri, feedTitle, format, since);
                if(!result.Item2.IsEmpty) {
                    store.With("ttl", feedCacheTtl.TotalSeconds).Put(result.Item2, new Result<DreamMessage>(TimeSpan.MaxValue)).Block();
                }
                return result;
            }
            return MakeNewsFeed(recentchanges(), feedUri, feedTitle, format, since);
        }
Example #9
        private static void ExtractRecentChangesParameters(DreamContext context, out DateTime since, out int limit, out int offset, out FeedFormat format, out string language, out NS ns, ref List<string> feedNameSuffixes) {

            // extract 'since' parameter
            since = DbUtils.ToDateTime(context.GetParam("since", DbUtils.ToString(DateTime.MinValue)));

            // extract 'limit' parameter
            limit = context.GetParam("limit", 100);
            if((limit <= 0) || (limit > MAX_RECENT_CHANGES)) {
                throw new MaxParameterInvalidArgumentException();
            }

            // extract 'offset' parameter
            offset = context.GetParam("offset", 0);
            if(offset < 0) {
                throw new OffsetParameterInvalidArgumentException();
            }

            // extract 'format' parameter
            switch(context.GetParam("format", "daily")) {
            case "raw":
                format = FeedFormat.RAW;
                break;
            case "rawdaily":
            case "dailyraw":
            case "digest":
                format = FeedFormat.RAW_DAILY;
                break;
            case "daily":
            case "atom":
                format = FeedFormat.ATOM_DAILY;
                break;
            case "all":
                format = FeedFormat.ATOM_ALL;
                break;
            default:
                throw new FormatParameterInvalidArgumentException();
            }

            // extract 'language' parameter
            language = context.GetParam("language", null);
            if(null != language) {
                PageBL.ValidatePageLanguage(language);
            }

            // extract 'namespace' parameter
            ns = context.GetParam<NS>("namespace", NS.UNKNOWN);

            // determine feed suffix
            if(feedNameSuffixes == null) {
                feedNameSuffixes = new List<string>();
            }
            if(context.GetParam("format", null) != null) {
                feedNameSuffixes.Add(string.Format("format={0}", context.GetParam("format", null)));
            }
            if(context.GetParam("since", null) != null) {
                feedNameSuffixes.Add(string.Format("since={0}", DbUtils.ToString(since)));
            }
            if(context.GetParam("limit", null) != null) {
                feedNameSuffixes.Add(string.Format("limit={0}", limit));
            }
            if(context.GetParam("offset", null) != null) {
                feedNameSuffixes.Add(string.Format("offset={0}", offset));
            }
            if(context.GetParam("language", null) != null) {
                feedNameSuffixes.Add(string.Format("lang={0}", language));
            }
            if(ns != NS.UNKNOWN) {
                feedNameSuffixes.Add(string.Format("namespace={0}", ns));
            }
        }
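
The suffixes collected here are what MakeNewsFeedCached (Example #8) appends to the feed name, so every distinct parameter combination gets its own cache entry: a request with ?format=daily&limit=50, for example, would be stored under feedName(format=daily,limit=50).xml.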
Example #10
 //-----------------------
 public UpdateChannelThread(FeedReaderForm form, FeedNode feedNode)
 {
     m_form       = form;
     m_feedNode   = feedNode;
     m_feedFormat = FeedFormat.Rss;
 }
Example #11
 public static FeedSegment ParseFeed(Uri feedUrl, XElement element, XElement someOtherElement, out FeedFormat format)
 {
     // The body was published empty; this stub only satisfies the compiler's
     // requirement that the out parameter be assigned and a value be returned.
     format = default(FeedFormat);
     throw new NotImplementedException();
 }
Example #12
 public ActionResult Feed(FeedFormat format)
 {
     var url = string.Format("{0}://{1}/now/", Request.Url.Scheme, Request.Url.Authority);
     var items = _siteService.FindRecentTemplates().ToFeed(url);
     var feed = new SyndicationFeed(items){Title = new TextSyndicationContent("Razor Do It")};
     return new FeedResult(feed, format);
 }
Example #13
        private static JObject GetNewReleaseEntryJson(JObject currentReleaseEntry, FeedFormat format, CoreToolsInfo coreToolsInfo)
        {
            IFeedEntryUpdater feedEntryUpdater = FeedEntryUpdaterFactory.GetFeedEntryUpdater(format);

            return feedEntryUpdater.GetUpdatedFeedEntry(currentReleaseEntry, coreToolsInfo);
        }
Example #14
 public FeedResult(SyndicationFeed feed, FeedFormat format)
 {
     Feed = feed;
     Format = format;
 }
Example #15
        public string GetFeedURL(int feedID, FeedFormat format, int start, int perPage, int level1CategoryID,
                                 int level2CategoryID, int programmeCategoryID, string[] programmeCategoryLevels,
                                 string lid, bool useHTTPS, bool reverseMapXML, bool bestseller)
        {
            BuyatAffiliateFeedGeturlParameters parameters = new BuyatAffiliateFeedGeturlParameters();
            parameters.feed_id = feedID.ToString();
            parameters.format = (BuyatAffiliateEntitiesFeedFormat)Enum.ToObject(typeof(BuyatAffiliateEntitiesFeedFormat), format);
            if (start != -1)
            {
                parameters.start = start.ToString();
            }
            if (perPage != -1)
            {
                parameters.perpage = perPage.ToString();
            }
            if (level1CategoryID != -1)
            {
                parameters.level1_category_id = level1CategoryID.ToString();
            }
            if (level2CategoryID != -1)
            {
                parameters.level2_category_id = level2CategoryID.ToString();
            }
            if (programmeCategoryID != -1)
            {
                parameters.programme_category_id = programmeCategoryID.ToString();
            }
            if (programmeCategoryLevels != null)
            {
                for (int i = 0; i < programmeCategoryLevels.Length; i++)
                {
                    if (!string.IsNullOrEmpty(programmeCategoryLevels[i]))
                    {
                        parameters.GetType().GetProperty("level" + (i + 1)).SetValue(parameters, programmeCategoryLevels[i], null);
                    }
                }
            }
            parameters.lid = lid;
            parameters.use_https = useHTTPS;
            parameters.reverse_map_xml = reverseMapXML;
            parameters.bestseller = bestseller;

            BuyatAffiliateFeedGeturlResponse response = binding.buyatAffiliateFeedGeturl(apiKey, parameters);
            return response.url;
        }