Example No. 1
        public static List<IDataItem> GetNewItems(AbstractSource source)
        {
            if (source.GetMetaDataValue(JobsProvider.META_SOURCE).ToString() == Jobs.JobsProvider.SourceNames.Monster.ToString())
            {
                int pages;

                try
                {
                    pages = int.Parse(source.GetMetaDataValue(JobsProvider.META_PAGES).ToString());
                }
                catch
                {
                    // Fall back to the default page count if the metadata value is missing or malformed.
                    pages = int.Parse(DefaultPages.ToString());
                }

                ConcurrentBag<IDataItem> items = new ConcurrentBag<IDataItem>();

                int    pageCounter = 0;
                object counterLock = new object();

                try
                {
                    for (int i = 1; i <= pages; i++)
                    {
                        // Monster paginates via the "pg" query parameter appended to the base URL.
                        string url = source.GetMetaDataValue(JobsProvider.META_URL) + "&pg=" + i;

                        var pageItems = GetItemsOnPage(url, source);

                        // Hand the page's items off for filtering; the shared counter and lock
                        // track filter work that is still in flight.
                        FilterUtil.FilterAndAdd(pageItems, items, pageCounter, counterLock, ((ISource2)source).Services);
                    }
                }
                catch (Exception e)
                {
                    // TODO: log the exception.
                }

                // Poll until all filtering signalled through pageCounter has completed.
                while (true)
                {
                    lock (counterLock)
                    {
                        if (pageCounter == 0)
                        {
                            break;
                        }
                    }

                    Thread.Sleep(2000);
                }

                return items.ToList();
            }
            else
            {
                // Not a Monster source: nothing to fetch here.
                return null;
            }
        }
Example No. 2
        public static List<IDataItem> GetNewItems(ISource source)
        {
            if (source.GetMetaDataValue(JobsProvider.META_SOURCE).ToString() == Jobs.JobsProvider.SourceNames.Dice.ToString())
            {
                // Read the paging and search configuration from the source metadata.
                int    pages       = int.Parse(source.GetMetaDataValue(JobsProvider.META_PAGES).ToString());
                int    size        = int.Parse(source.GetMetaDataValue(JobsProvider.META_PAGE_SIZE).ToString());
                string rangeString = source.GetMetaDataValue(JobsProvider.META_RANGE).ToString();
                string query       = source.GetMetaDataValue(JobsProvider.META_QUERY).ToString();
                string location    = source.GetMetaDataValue(JobsProvider.META_LOCATION).ToString();

                string queryBase = CreateBaseUrl(query, location, rangeString);

                ConcurrentBag<IDataItem> items = new ConcurrentBag<IDataItem>();

                int    pageCounter = 0;
                object counterLock = new object();

                try
                {
                    for (int i = 1; i <= pages; i++)
                    {
                        // Dice encodes the page number and page size directly in the URL path.
                        string url = queryBase + "-startPage-" + i + "-limit-" + size + "-jobs.html";

                        var pageItems = GetItemsOnPage(url, source);

                        // Hand the page's items off for filtering; the shared counter and lock
                        // track filter work that is still in flight.
                        FilterUtil.FilterAndAdd(pageItems, items, pageCounter, counterLock, ((ISource2)source).Services);
                    }
                }
                catch (Exception e)
                {
                    // TODO: log the exception.
                }

                // Poll until all filtering signalled through pageCounter has completed.
                while (true)
                {
                    lock (counterLock)
                    {
                        if (pageCounter == 0)
                        {
                            break;
                        }
                    }

                    Thread.Sleep(2000);
                }

                return items.ToList();
            }
            else
            {
                // Not a Dice source: nothing to fetch here.
                return null;
            }
        }
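Both examples follow the same collection pattern: items fetched from each result page are handed to FilterUtil.FilterAndAdd along with a ConcurrentBag, a shared counter, and a lock object, and the caller then polls the counter until it drops back to zero before turning the bag into a list. FilterUtil.FilterAndAdd and GetItemsOnPage are not shown here, so the sketch below is only a minimal, self-contained illustration of that counter-guarded hand-off: the counter lives in a field so the background work can actually update it, Task.Run stands in for whatever the real filter does, and every name in it is illustrative rather than taken from the project.

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

class PagedCollector
{
    private readonly ConcurrentBag<string> items = new ConcurrentBag<string>();
    private readonly object counterLock = new object();
    private int pendingPages;

    // Illustrative stand-in for FilterUtil.FilterAndAdd: bump the shared counter,
    // filter the page's items on a background task, then release the counter.
    private void FilterAndAdd(IEnumerable<string> pageItems)
    {
        lock (counterLock) { pendingPages++; }

        Task.Run(() =>
        {
            try
            {
                foreach (var item in pageItems.Where(i => !string.IsNullOrWhiteSpace(i)))
                {
                    items.Add(item);
                }
            }
            finally
            {
                lock (counterLock) { pendingPages--; }
            }
        });
    }

    public List<string> Collect(int pages)
    {
        for (int i = 1; i <= pages; i++)
        {
            // Stand-in for GetItemsOnPage(url, source).
            var pageItems = new[] { "item " + i + "a", "item " + i + "b" };
            FilterAndAdd(pageItems);
        }

        // Same shape as the wait loop in the examples: poll the counter
        // until every background filter has finished.
        while (true)
        {
            lock (counterLock)
            {
                if (pendingPages == 0)
                {
                    break;
                }
            }

            Thread.Sleep(100);
        }

        return items.ToList();
    }

    static void Main()
    {
        var collected = new PagedCollector().Collect(3);
        Console.WriteLine(collected.Count + " items collected");
    }
}

Polling with Thread.Sleep works, but it only re-checks at a fixed interval; a CountdownEvent, or simply keeping the Task objects returned by Task.Run and waiting on them, would let the caller block exactly until the last filter finishes.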