/// <summary>
/// Deletes the condition represented by the clicked button's row from the
/// rule that is the current DataContext of this view.
/// </summary>
/// <param name="sender">The delete button; its DataContext is the row's condition.</param>
/// <param name="e">Routed event data (unused).</param>
private void BtnDelete_Click(object sender, RoutedEventArgs e)
{
    var parentRule = (CrawlingRule)DataContext;
    var targetCondition = (CrawlingCondition)((Control)sender).DataContext;

    parentRule.Conditions.Remove(targetCondition);
}
/// <summary>
/// Moves the condition represented by the clicked button's row one position
/// down in the owning rule's condition list, unless it is already last.
/// </summary>
/// <param name="sender">The move-down button; its DataContext is the row's condition.</param>
/// <param name="e">Routed event data (unused).</param>
private void BtnMoveDown_Click(object sender, RoutedEventArgs e)
{
    var parentRule = (CrawlingRule)DataContext;
    var targetCondition = (CrawlingCondition)((Control)sender).DataContext;

    int currentIndex = parentRule.Conditions.IndexOf(targetCondition);
    bool alreadyLast = currentIndex >= parentRule.Conditions.Count - 1;
    if (!alreadyLast)
    {
        parentRule.Conditions.Move(currentIndex, currentIndex + 1);
    }
}
/// <summary>
/// Static constructor that builds the sample model instances exposed as
/// design-time DataContexts for the XAML designer. All values are dummy
/// data; several collections deliberately repeat the same instance just to
/// give the designer a populated list.
/// </summary>
static DesignTimeModels()
{
    // Sample bucket + rule pair that gets wired into the sample CrawlingConfig below.
    CrawlingBucket bucket = new CrawlingBucket
    {
        Name = "Example.com",
        Description = "",
        NbThreads = 2,
        NbRetry = 2,
        LimitRequests = 100
    };
    CrawlingCondition condition = new CrawlingCondition
    {
        FieldType = CrawlingConditionFieldType.Host,
        ComparisonType = CrawlingConditionComparisonType.Equals,
        Value = "www.example.com"
    };
    CrawlingRule rule = new CrawlingRule
    {
        Name = "Example.com",
        Description = "",
        Behavior = ResourceBehavior.FollowAllReferences,
        TargetBucket = bucket
    };
    rule.Conditions.Add(condition);

    // Standalone sample bucket, also reused (same instance, twice) in the bucket collection.
    CrawlingBucket = new CrawlingBucket
    {
        Name = "Bucket name",
        Description = "Bucket description",
        LimitRequests = 0,
        NbThreads = 2,
        NbRetry = 1
    };
    CrawlingBucket.HostMappings.Add(new CrawlingHostMapping
    {
        Host = "www.example.com",
        IPAddress = IPAddress.Parse("127.0.0.1")
    });
    ProcessingBucketCollection = new ObservableCollection<CrawlingBucket> { CrawlingBucket, CrawlingBucket };

    // Sample conditions: a singleton plus a collection containing it.
    CrawlingCondition = new CrawlingCondition
    {
        ComparisonType = CrawlingConditionComparisonType.Equals,
        FieldType = CrawlingConditionFieldType.Scheme,
        Value = "http"
    };
    ProcessingConditionCollection = new ObservableCollection<CrawlingCondition>
    {
        CrawlingCondition,
        new CrawlingCondition
        {
            ComparisonType = CrawlingConditionComparisonType.Equals,
            FieldType = CrawlingConditionFieldType.Host,
            Value = "www.example.com"
        }
    };

    // Sample rule referencing the sample bucket and conditions above.
    CrawlingRule = new CrawlingRule
    {
        Name = "Rule name",
        Description = "Rule description",
        Behavior = ResourceBehavior.FollowAllReferences,
        TargetBucket = CrawlingBucket
    };
    CrawlingRule.Conditions.AddRange(ProcessingConditionCollection);
    ProcessingRuleCollection = new ObservableCollection<CrawlingRule> { CrawlingRule, CrawlingRule };

    // Sample configuration tying bucket, rule and a starting URL together.
    CrawlingConfig = new CrawlingConfig
    {
        Name = "Processing configuration name",
        Description = "Processing configuration description"
    };
    CrawlingConfig.Buckets.Add(bucket);
    CrawlingConfig.Rules.Add(rule);
    // Fix: host was misspelled "www.exaple.com"; every other sample value in
    // this constructor uses "www.example.com".
    CrawlingConfig.StartingUrls.Add(new CrawlingStartingUrl
    {
        Name = "SiteMap",
        Value = new Uri("http://www.example.com/SiteMap.axd?UrlEncode=false")
    });
    ProcessingConfigurationCollection = new ObservableCollection<CrawlingConfig> { CrawlingConfig, CrawlingConfig, CrawlingConfig };

    // Sample file view-model with one resource, bound to the current (designer) dispatcher.
    FileViewModel = new FileViewModel(new CrawlingContext(), new DispatcherQueue(Dispatcher.CurrentDispatcher));
    FileViewModel.Model.Resources.Add(new Resource(new Uri("http://www.google.ca"), ResourceBehavior.FollowAllReferences));

    // Sample report configuration with a few representative columns.
    ReportConfig = new ReportConfig
    {
        Guid = Guid.NewGuid(),
        Name = "Resources",
        Description = "Some description",
        Type = ReportType.ListResources
    };
    ReportConfig.Columns.AddRange(new[]
    {
        new ReportConfigColumn { Name = "Starting time", Path = "Resource.TimeStart", Width = 20 },
        new ReportConfigColumn { Name = "Content type", Path = "Resource.Headers.ContentType", Width = 15 },
        new ReportConfigColumn { Name = "Status code", Path = "Resource.HttpStatusCode", Width = 15 },
        new ReportConfigColumn { Name = "URL", Path = "Resource.URL.AbsoluteUri" }
    });
    ReportCollection = new ObservableCollection<ReportConfig>
    {
        ReportConfig,
        new ReportConfig { Name = "References", Type = ReportType.ListResources }
    };

    // Top-level sample view-model holding the sample file.
    MainViewModel = new MainViewModel();
    MainViewModel.Files.Add(FileViewModel);
}