/// <summary>
/// Crawler worker thread: repeatedly pulls the next company name from the
/// shared task list, crawls it via <c>CrawlerWork</c>, and writes the result
/// to the database, until the task list reports no remaining work.
/// </summary>
/// <param name="obj">Thread-start argument; must be a <c>TaskList</c>.</param>
private void CrawlerThread(object obj)
{
    TaskList tasklist = obj as TaskList;
    if (tasklist == null)
    {
        // Fail fast with a clear log line instead of a NullReferenceException
        // on the first tasklist.GetSerplusNum() call below.
        OutLog("CrawlerThread 参数类型错误,期望 TaskList。");
        return;
    }

    // changeIp is threaded through successive CrawlerWork calls so the
    // crawler can keep reusing whatever IP/proxy the last call handed back.
    string changeIp = null;

    while (tasklist.GetSerplusNum() > 0)
    {
        try
        {
            string companyName = tasklist.GetNext();
            var transitionModel = CrawlerWork(companyName, changeIp);
            changeIp = transitionModel.Value;
            if (!DoSql.InsertBj(transitionModel.Key))
            {
                OutLog(companyName + "写入数据库错误。");
            }
        }
        catch (Exception e)
        {
            // One failed company must not kill the worker thread: log and
            // move on. Use ToString() so the stack trace is preserved
            // (Message alone made failures undiagnosable).
            OutLog(e.ToString());
        }
    }

    OutLog("线程【" + Thread.CurrentThread.ManagedThreadId + "】爬取任务结束");
    tasklist.StopThead();
}
/// <summary>
/// Runs a single crawl for one search term on the calling thread and stores
/// the result in the database. Returns the (possibly updated) IP handed back
/// by the crawl, so the caller can pass it into the next invocation.
/// </summary>
/// <param name="searchText">Company name / search term to crawl.</param>
/// <param name="list">Unused by this method (kept for signature compatibility).</param>
/// <param name="listbox">Log output control; assigned to <c>ListLogs</c>.</param>
/// <param name="changeIp">IP/proxy carried over from a previous call, or null.</param>
/// <returns>The IP/proxy value returned by the crawl (for reuse by the caller).</returns>
public string SingelCrawlerThread(string searchText, ListBox list, TextBox listbox, string changeIp = null)
{
    ListLogs = listbox;

    try
    {
        var result = CrawlerWork(searchText, changeIp);
        changeIp = result.Value;

        var inserted = DoSql.InsertBj(result.Key);
        if (!inserted)
        {
            OutLog(searchText + ",写入数据库错误。");
        }
    }
    catch (Exception e)
    {
        OutLog(e.Message);
    }

    OutLog("线程【" + Thread.CurrentThread.ManagedThreadId + "】爬取任务结束");
    return changeIp;
}
/// <summary>
/// HTTP request completion callback (SqlServer path): converts the response
/// into DTOs, keeps at most the first 10 records, and inserts them via
/// <c>DoSql.InsertQg</c>.
/// </summary>
/// <param name="hsc">HTTP status code of the completed request.</param>
/// <param name="content">Raw response body.</param>
/// <param name="param">Task identifier, echoed into the log messages.</param>
private void OnComplete(HttpStatusCode hsc, string content, string param)
{
    var cusList = GetDtoList(hsc, content, param);

    // Business rule: persist at most the first 10 collected records.
    if (cusList.Count > 10)
    {
        OutLog("【消息】发现采集数据大于10条,此处仅保留前10条记录。");
        cusList = cusList.Take(10).ToList();
    }

    DoSql.InsertQg(cusList);

    if (cusList.Count > 0)
    {
        OutLog("【完成】任务:" + param);
    }
    else
    {
        OutLog("【缺省】任务:" + param);
    }

    // NOTE(review): presumably a crawl-rate throttle between requests —
    // confirm the intent before removing or tuning this delay.
    Thread.Sleep(250);
}