Example #1
        /// <summary>
        /// Copy the file to the copy folder and refresh the "latest" file
        /// (same name with the timestamp segment removed).
        /// </summary>
        /// <param name="fileFullName">Full path of the file to copy.</param>
        public static void CopyFileAndUpdateLatestFile(string fileFullName)
        {
            if (File.Exists(fileFullName))
            {
                // File names are assumed to carry a timestamp segment, e.g. "name.yyyyMMdd.ext";
                // strip it to get the stable "latest" name, falling back to the plain name if absent.
                string nameNoFolder = Path.GetFileNameWithoutExtension(fileFullName);
                int    dotIndex     = nameNoFolder.LastIndexOf('.');
                string shortName    = dotIndex > 0 ? nameNoFolder.Substring(0, dotIndex) : nameNoFolder;

                string folder = InitInfo.FileCopyFolder;
                try
                {
                    if (!Directory.Exists(folder))
                    {
                        Directory.CreateDirectory(folder);
                    }
                    string savePath          = Path.Combine(folder, Path.GetFileName(fileFullName));
                    string shortNameFilePath = Path.Combine(folder, shortName + Path.GetExtension(fileFullName));
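                    // The first copy keeps the timestamped name for archiving; the second
                    // overwrites the short-named "latest" file (overwrite: true).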
                    InitInfo.LogMessage("Copy file to " + savePath);
                    File.Copy(fileFullName, savePath);
                    InitInfo.LogMessage("Update file  " + shortNameFilePath);
                    File.Copy(fileFullName, shortNameFilePath, true);
                }
                catch (Exception ex)
                {
                    InitInfo.LogMessage("Failed to CopyFile. Exceptoin: " + ex.GetBaseException().ToString());
                }
            }
        }
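
A minimal call-site sketch, assuming the method lives on the ArticleParserHelper class (as Example #2 suggests) and that InitInfo.FileCopyFolder points at the copy folder; the source path and file name below are hypothetical.

        // Hypothetical caller: the crawler has just written a timestamped result file
        // outside InitInfo.FileCopyFolder; archive it and refresh the "latest" copy.
        string crawledFile = @"C:\crawler\output\news.20240101.xml";
        ArticleParserHelper.CopyFileAndUpdateLatestFile(crawledFile);
        // Afterwards InitInfo.FileCopyFolder holds news.20240101.xml (archive) and news.xml (latest).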
Example #2
        /// <summary>
        /// Crawl news and comments, then save the result to a local file.
        /// </summary>
        public void Crawl()
        {
            string content = string.Empty;

            if (!GetCrawlFlag())
            {
                return;
            }

            bool isCrawled = false;
            int  tryCount  = 0;

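            // Retry loop: keep trying until a non-empty result is produced or
            // InitInfo.RetryCount attempts have been made.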
            while (!isCrawled && tryCount++ < InitInfo.RetryCount)
            {
                InitInfo.LogMessage(string.Format("Starting to crawl {0} site...", GetSiteName()));
                try
                {
                    IList <NewsItem> news = GetNewsList();
                    if (news != null && news.Count > 0)
                    {
                        // Choose the serialization format from the target file's extension;
                        // anything other than .json falls back to XML.
                        switch (Path.GetExtension(GetFileName()).ToLower())
                        {
                            case ".json":
                                content = CrawlAsJson(news);
                                break;

                            case ".xml":
                            default:
                                content = CrawlAsXml(news);
                                break;
                        }

                        if (!string.IsNullOrEmpty(content))
                        {
                            isCrawled = true;
                        }
                    }
                }
                catch (Exception ex)
                {
                    InitInfo.LogMessage("Failed to crawl. Exceptoin: " + ex.GetBaseException().ToString());
                }

                if (!isCrawled)
                {
                    InitInfo.LogMessage("Failed to crawl. Retry after 10 seconds...");
                    System.Threading.Thread.Sleep(10 * 1000);
                    continue;
                }
                else
                {
                    string fileName = GetFileName();
                    InitInfo.LogMessage("Save file to " + fileName);
                    ArticleParserHelper.SaveToLocalFile(fileName, content);
                    ArticleParserHelper.CopyFileAndUpdateLatestFile(fileName);
                }
            }
            if (!isCrawled)
            {
                InitInfo.LogMessage("Error occurs when crawl news and comments!");
            }
        }
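
The members Crawl() relies on (GetCrawlFlag, GetSiteName, GetNewsList, GetFileName, CrawlAsJson, CrawlAsXml) are not shown above. A rough sketch of what a concrete crawler might supply, assuming they are overridable members of a shared base class; the base class name, signatures, and bodies below are illustrative guesses, not the project's actual API.

        // Hypothetical derived crawler; NewsCrawlerBase and the member signatures are assumptions.
        public class ExampleNewsCrawler : NewsCrawlerBase
        {
            protected override bool   GetCrawlFlag() => true;   // crawl unconditionally in this sketch
            protected override string GetSiteName()  => "example.com";
            protected override string GetFileName()  => @"C:\crawler\output\news.20240101.xml";

            protected override IList<NewsItem> GetNewsList()
            {
                // A real implementation would download and parse the site here.
                return new List<NewsItem>();
            }

            protected override string CrawlAsXml(IList<NewsItem> news)
            {
                // Serialize the items to XML; the real project has its own schema.
                return "<news/>";
            }

            protected override string CrawlAsJson(IList<NewsItem> news)
            {
                return "[]";
            }
        }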