/// <summary>
/// Splits the image's pixel data evenly across all connected worker peers and
/// dispatches one processing request per peer. Replies are collected
/// asynchronously via OnReadFromNetworkStream; the supplied callback is stored
/// and invoked once the reassembled image is ready.
/// </summary>
/// <param name="image">Bitmap to be filtered.</param>
/// <param name="filterID">Identifier of the filter each peer should apply.</param>
/// <param name="onImageProcessed">Callback to invoke when processing completes.</param>
public void ProcessImage(Bitmap image, int filterID, OnImageProcessed onImageProcessed)
{
    this.onImageProcessed = onImageProcessed;

    if (Workers.Count == 0)
    {
        // No peers to distribute work to — surface an error and bail out.
        MessageBox.Show("App must contains at least 1 peer", "Error");
        return;
    }

    // Flatten the bitmap into pixel data and keep it for later reassembly.
    int[] pixels = GetImageArray(image);
    ArrayTempBuffer = pixels;

    // Each peer is assigned an equally sized slice of the pixel array.
    // NOTE(review): any remainder from the integer division (pixels.Length %
    // Workers.Count) is presumably handled by the receiving side — confirm.
    int sliceSize = pixels.Length / Workers.Count;
    PartsRemained = Workers.Count;
    Offset = sliceSize;

    int partIndex = 0;
    foreach (PeerBaseContainer peer in Workers)
    {
        // Describe the slice this peer is responsible for.
        ImageProcessingMessage request = new ImageProcessingMessage(partIndex++, filterID, image.Height, image.Width, sliceSize, pixels);

        // Send the request off the calling thread...
        Task.Factory.StartNew(() => peer.SendMessage(request));

        // ...and asynchronously wait for the 4-byte reply header.
        peer.Client.GetStream().BeginRead(peer.Buffer, 0, 4, OnReadFromNetworkStream, peer);
    }
}
/// <summary>
/// Process a range of ImageIDs after crawling. Useful if crawled Images were not processed
/// at crawl time according to the desired ApplicationSettings configuration.
/// Calling this method DOES change the 'LastDiscovered' fields where applicable.
/// This method is not called when crawling, rather during post-processing.
/// </summary>
/// <param name="crawler">Crawler supplying the ApplicationSettings and WebSettings used to build the processing pipeline.</param>
/// <param name="imageIDLowerBound">First ImageID (inclusive) to process.</param>
/// <param name="imageIDUpperBound">Last ImageID (inclusive) to process.</param>
public static void ProcessImages(Crawler<TArachnodeDAO> crawler, long imageIDLowerBound, long imageIDUpperBound)
{
    //do not assign the application settings. doing so will override the ApplicationSetting you set before calling this method...
    TArachnodeDAO arachnodeDAO = (TArachnodeDAO)Activator.CreateInstance(typeof(TArachnodeDAO), crawler.ApplicationSettings.ConnectionString, crawler.ApplicationSettings, crawler.WebSettings, false, false);

    //assemble the manager/service graph that the Engine would normally build...
    ConsoleManager<TArachnodeDAO> consoleManager = new ConsoleManager<TArachnodeDAO>(crawler.ApplicationSettings, crawler.WebSettings);
    ActionManager<TArachnodeDAO> actionManager = new ActionManager<TArachnodeDAO>(crawler.ApplicationSettings, crawler.WebSettings, consoleManager);
    CookieManager cookieManager = new CookieManager();
    MemoryManager<TArachnodeDAO> memoryManager = new MemoryManager<TArachnodeDAO>(crawler.ApplicationSettings, crawler.WebSettings);
    RuleManager<TArachnodeDAO> ruleManager = new RuleManager<TArachnodeDAO>(crawler.ApplicationSettings, crawler.WebSettings, consoleManager);
    CacheManager<TArachnodeDAO> cacheManager = new CacheManager<TArachnodeDAO>(crawler.ApplicationSettings, crawler.WebSettings);
    CrawlerPeerManager<TArachnodeDAO> crawlerPeerManager = new CrawlerPeerManager<TArachnodeDAO>(crawler.ApplicationSettings, crawler.WebSettings, null, arachnodeDAO);
    Cache<TArachnodeDAO> cache = new Cache<TArachnodeDAO>(crawler.ApplicationSettings, crawler.WebSettings, crawler, actionManager, cacheManager, crawlerPeerManager, memoryManager, ruleManager);
    DiscoveryManager<TArachnodeDAO> discoveryManager = new DiscoveryManager<TArachnodeDAO>(crawler.ApplicationSettings, crawler.WebSettings, cache, actionManager, cacheManager, memoryManager, ruleManager);

    //load the CrawlActions, CrawlRules and EngineActions...
    ruleManager.ProcessCrawlRules(crawler);
    actionManager.ProcessCrawlActions(crawler);
    actionManager.ProcessEngineActions(crawler);

    //these three methods are called in the Engine.
    UserDefinedFunctions.RefreshAllowedExtensions(true);
    UserDefinedFunctions.RefreshAllowedSchemes(true);
    UserDefinedFunctions.RefreshDisallowed();

    //instantiate a WebClient to access the ResponseHeaders...
    WebClient<TArachnodeDAO> webClient = new WebClient<TArachnodeDAO>(crawler.ApplicationSettings, arachnodeDAO.WebSettings, consoleManager, cookieManager, new ProxyManager<TArachnodeDAO>(crawler.ApplicationSettings, crawler.WebSettings, consoleManager));
    //warm-up request so the WebClient's ResponseHeaders are populated before processing begins.
    webClient.GetHttpWebResponse("http://google.com", "GET", null, null, null, null);

    ImageManager<TArachnodeDAO> imageManager = new ImageManager<TArachnodeDAO>(crawler.ApplicationSettings, crawler.WebSettings, discoveryManager, arachnodeDAO);

    for (long i = imageIDLowerBound; i <= imageIDUpperBound; i++)
    {
        ArachnodeDataSet.ImagesRow imagesRow = null;

        try
        {
            //get the Image from the database. we need the source data as we don't store this in the index.
            //even though most of the fields are available in the Document, the Image is the authoritative source, so we'll use that for all of the fields.
            imagesRow = arachnodeDAO.GetImage(i.ToString());

            if (imagesRow != null)
            {
                if (imagesRow.Source == null || imagesRow.Source.Length == 0)
                {
                    //the database row lacks the binary source; try to reload it from the downloaded-images directory on disk.
                    if (File.Exists(discoveryManager.GetDiscoveryPath(crawler.ApplicationSettings.DownloadedImagesDirectory, imagesRow.AbsoluteUri, imagesRow.FullTextIndexType)))
                    {
                        imagesRow.Source = File.ReadAllBytes(discoveryManager.GetDiscoveryPath(crawler.ApplicationSettings.DownloadedImagesDirectory, imagesRow.AbsoluteUri, imagesRow.FullTextIndexType));
                    }
                    else
                    {
                        Console.WriteLine("ImageID: " + i + " was NOT processed successfully.");

                        if (OnImageProcessed != null)
                        {
                            OnImageProcessed.BeginInvoke(imagesRow, "ImageID: " + i + " was NOT processed successfully.", null, null);
                        }

                        //NOTE(review): execution still falls through to ProcessImage below with an empty
                        //Source, so a "was processed successfully" message is ALSO reported for this
                        //ImageID. Confirm whether this branch should 'continue' to the next ID instead.
                    }
                }

                ProcessImage(crawler.ApplicationSettings, crawler.WebSettings, crawler, imagesRow, webClient, cache, actionManager, consoleManager, crawlerPeerManager, discoveryManager, imageManager, memoryManager, ruleManager, arachnodeDAO);

                Console.WriteLine("ImageID: " + i + " was processed successfully.");

                if (OnImageProcessed != null)
                {
                    OnImageProcessed.BeginInvoke(imagesRow, "ImageID: " + i + " was processed successfully.", null, null);
                }
            }
        }
        catch (Exception exception)
        {
            //report the failure on the console and through the callback, then persist it; processing continues with the next ImageID.
            Console.WriteLine("ImageID: " + i + " was NOT processed successfully.");
            Console.WriteLine(exception.Message);

            if (OnImageProcessed != null)
            {
                OnImageProcessed.BeginInvoke(imagesRow, "ImageID: " + i + " was NOT processed successfully.", null, null);
                OnImageProcessed.BeginInvoke(imagesRow, exception.Message, null, null);
            }

            arachnodeDAO.InsertException(null, null, exception, false);
        }
    }

    //stop the CrawlActions, CrawlRules and EngineActions...
    ruleManager.Stop();
    actionManager.Stop();
}