        public void AddNewRecord()
        {
            //arrange
            var record = new TddDemandRecord
            {
                Demand = false,
                Site = "http://localhost",
                Technology = "C#"
            };

            //act
            _context.Add(record);
            _context.SaveChanges();

            //assert
            // FirstOrDefault keeps the null assertion meaningful; First() would throw before it runs
            var found = _context.GetBySiteName("http://localhost").FirstOrDefault();
            Assert.That(found, Is.Not.Null);
        }
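
The assertion above goes through a GetBySiteName query that is not among these examples. A minimal sketch of what it might look like, assuming it lives on the same repository object and that the records are exposed through the Records set used in Example 2 below, is:

    // Hypothetical sketch: GetBySiteName is not shown in these samples.
    // Assumes _context.Records is the same set as in the Add/Delete examples
    // below, and that System.Linq is in scope.
    public IEnumerable<TddDemandRecord> GetBySiteName(string site)
    {
        return _context.Records.Where(r => r.Site == site);
    }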
Example 2
 public void Add(TddDemandRecord record)
 {
     _context.Records.Add(record);
 }
Example 3
 public void Delete(TddDemandRecord record)
 {
     _context.Records.Remove(record);
 }
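
Examples 2 and 3 are fragments of the same repository type; the SaveChanges call used by the test in Example 1 and by the crawler in Example 4 is not shown. A minimal sketch, assuming the repository simply delegates persistence to the EF context it wraps, is:

    // Minimal sketch, not taken from the samples above: the repository
    // forwards SaveChanges to the underlying EF context (_context).
    public void SaveChanges()
    {
        _context.SaveChanges();
    }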
Example 4
        protected virtual void StartCrawling()
        {
            Logger.Log(BaseUrl + " crawler started...");

            CleanUp();

            for (var nextPage = 1; ; nextPage++)
            {
                var url = CreateNextUrl(nextPage);
                var document = Loader.LoadDocument(url);

                Logger.Log("processing page: [" + nextPage.ToString() + "] with url: " + url);

                var rows = GetJobRows(document);
                var rowsCount = rows.Count();

                Logger.Log("extracted " + rowsCount + " vacations on page");
                if (rowsCount == 0)
                {
                    Logger.Log("no more vacancies to process, breaking main loop");
                    break;
                }

                Logger.Log("starting to process all vacancies");
                foreach (var row in rows)
                {
                    Logger.Log("starting processing div, extracting vacancy href...");
                    var vacancyUrl = GetVacancyUrl(row);
                    if (vacancyUrl == null)
                    {
                        Logger.Log("FAILED to extract vacancy href, not stopped, proceed with next one");
                        continue;
                    }

                    Logger.Log("started to process vacancy with url: " + vacancyUrl);
                    var vacancyBody = GetVacancyBody(Loader.LoadDocument(vacancyUrl));
                    if (vacancyBody == null)
                    {
                        Logger.Log("FAILED to extract vacancy body, not stopped, proceed with next one");
                        continue;
                    }

                    var position = GetPosition(row);
                    var technology = GetTechnology(position, vacancyBody);
                    var demand = GetDemand(vacancyBody);

                    var record = new TddDemandRecord()
                    {
                        Site = BaseUrl,
                        Position = position,
                        Technology = technology,
                        Demand = demand,
                        Url = vacancyUrl
                    };

                    Logger.Log("new record has been created and initialized");
                    Repository.Add(record);
                    Repository.SaveChanges();
                    Logger.Log("record has been successfully stored to database.");
                    Logger.Log("finished to process vacancy");

                }
                Logger.Log("finished to process page");
            }
            Logger.Log(BaseUrl + " crawler has successfully finished");
        }
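
StartCrawling works as a template method: paging, logging, and persistence are fixed here, while the site-specific parsing is delegated to members that each concrete crawler supplies. Those members are not part of this example; a sketch of how the base class might declare them, with signatures inferred from the calls above and the document/element types assumed, is:

    // Sketch of the site-specific members StartCrawling depends on.
    // Signatures are inferred from the calls in StartCrawling; the element and
    // document types (HtmlAgilityPack's HtmlNode/HtmlDocument) and the use of
    // abstract members are assumptions for illustration only.
    protected abstract string BaseUrl { get; }
    protected abstract void CleanUp();
    protected abstract string CreateNextUrl(int pageNumber);
    protected abstract IEnumerable<HtmlNode> GetJobRows(HtmlDocument document);
    protected abstract string GetVacancyUrl(HtmlNode row);
    protected abstract string GetVacancyBody(HtmlDocument vacancyDocument);
    protected abstract string GetPosition(HtmlNode row);
    protected abstract string GetTechnology(string position, string vacancyBody);
    protected abstract bool GetDemand(string vacancyBody);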