/// <summary>Snippet for ListCrawledUrls</summary>
public void ListCrawledUrlsRequestObject()
{
    // Snippet: ListCrawledUrls(ListCrawledUrlsRequest, CallSettings)
    // Create client
    WebSecurityScannerClient client = WebSecurityScannerClient.Create();
    // Initialize request argument(s)
    ListCrawledUrlsRequest request = new ListCrawledUrlsRequest
    {
        Parent = "",
    };
    // Make the request
    PagedEnumerable<ListCrawledUrlsResponse, CrawledUrl> crawledUrls = client.ListCrawledUrls(request);

    // Option 1: iterate every item; the page-fetching RPCs happen lazily as needed.
    foreach (CrawledUrl url in crawledUrls)
    {
        // Do something with each item
        Console.WriteLine(url);
    }

    // Option 2: iterate raw response pages (server-defined size), one RPC per page.
    foreach (ListCrawledUrlsResponse rawPage in crawledUrls.AsRawResponses())
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (CrawledUrl url in rawPage)
        {
            // Do something with each item
            Console.WriteLine(url);
        }
    }

    // Option 3: retrieve one page of a requested size (the final page may be
    // smaller), issuing as many RPCs as required to fill it.
    int pageSize = 10;
    Page<CrawledUrl> singlePage = crawledUrls.ReadPage(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (CrawledUrl url in singlePage)
    {
        // Do something with each item
        Console.WriteLine(url);
    }
    // Keep the token so the next page can be requested later.
    string nextPageToken = singlePage.NextPageToken;
    // End snippet
}