Skip to content

Commit

Permalink
Remove async delegates which are not supported in .net core (https://…
Browse files Browse the repository at this point in the history
  • Loading branch information
Kemyke committed Oct 2, 2016
1 parent 3838672 commit 6c06489
Show file tree
Hide file tree
Showing 6 changed files with 10 additions and 460 deletions.
9 changes: 4 additions & 5 deletions Abot.Demo/Program.cs
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,11 @@ static void Main(string[] args)
//crawler = GetManuallyConfiguredWebCrawler();
//crawler = GetCustomBehaviorUsingLambdaWebCrawler();

//Subscribe to any of these asynchronous events, there are also synchronous versions of each.
//This is where you process data about specific events of the crawl
crawler.PageCrawlStartingAsync += crawler_ProcessPageCrawlStarting;
crawler.PageCrawlCompletedAsync += crawler_ProcessPageCrawlCompleted;
crawler.PageCrawlDisallowedAsync += crawler_PageCrawlDisallowed;
crawler.PageLinksCrawlDisallowedAsync += crawler_PageLinksCrawlDisallowed;
crawler.PageCrawlStarting += crawler_ProcessPageCrawlStarting;
crawler.PageCrawlCompleted += crawler_ProcessPageCrawlCompleted;
crawler.PageCrawlDisallowed += crawler_PageCrawlDisallowed;
crawler.PageLinksCrawlDisallowed += crawler_PageLinksCrawlDisallowed;

//Start the crawl
//This is a synchronous call
Expand Down
10 changes: 5 additions & 5 deletions Abot.Tests.Integration/CrawlSiteSimulator.cs
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ public void Crawl_MaxPagesTo5_OnlyCrawls5Pages()
int pagesCrawledCount = 0;

PoliteWebCrawler crawler = new PoliteWebCrawler(configuration, null, null, null, null, null, null, null, null);
crawler.PageCrawlCompletedAsync += (a, b) => pagesCrawledCount++;
crawler.PageCrawlCompleted += (a, b) => pagesCrawledCount++;

crawler.Crawl(new Uri("http://localhost:1111/"));

Expand All @@ -55,7 +55,7 @@ public void Crawl_MaxPagesTo25_OnlyCrawls25Pages()
int pagesCrawledCount = 0;

PoliteWebCrawler crawler = new PoliteWebCrawler(configuration, null, null, null, null, null, null, null, null);
crawler.PageCrawlCompletedAsync += (a, b) => pagesCrawledCount++;
crawler.PageCrawlCompleted += (a, b) => pagesCrawledCount++;

crawler.Crawl(new Uri("http://localhost:1111/"));

Expand All @@ -74,7 +74,7 @@ public void Crawl_MaxPagesTo5_WithCrawlDelay_OnlyCrawls5Pages()
int pagesCrawledCount = 0;

PoliteWebCrawler crawler = new PoliteWebCrawler(configuration, null, null, null, null, null, null, null, null);
crawler.PageCrawlCompletedAsync += (a, b) => pagesCrawledCount++;
crawler.PageCrawlCompleted += (a, b) => pagesCrawledCount++;

crawler.Crawl(new Uri("http://localhost:1111/"));

Expand All @@ -90,7 +90,7 @@ public void Crawl_CrawlTimeoutIs1Sec_TimesOut()
int pagesCrawledCount = 0;

PoliteWebCrawler crawler = new PoliteWebCrawler(configuration, null, null, null, null, null, null, null, null);
crawler.PageCrawlCompletedAsync += (a, b) => pagesCrawledCount++;
crawler.PageCrawlCompleted += (a, b) => pagesCrawledCount++;

CrawlResult result = crawler.Crawl(new Uri("http://localhost:1111/"));

Expand Down Expand Up @@ -152,7 +152,7 @@ public void Crawl_IsRateLimited()
int pagesCrawledCount = 0;

var crawler = new PoliteWebCrawler(configuration);
crawler.PageCrawlCompletedAsync += (a, b) => pagesCrawledCount++;
crawler.PageCrawlCompleted += (a, b) => pagesCrawledCount++;

var uriToCrawl = new Uri("http://localhost:1111/");
var start = DateTime.Now;
Expand Down
2 changes: 1 addition & 1 deletion Abot.Tests.Integration/CrawlTestBase.cs
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ public CrawlTestBase(Uri rootUri, int maxSecondsToCrawl)

public void CrawlAndAssert(IWebCrawler crawler)
{
crawler.PageCrawlCompletedAsync += crawler_PageCrawlCompleted;
crawler.PageCrawlCompleted += crawler_PageCrawlCompleted;

CrawlResult result = crawler.Crawl(_rootUri);

Expand Down
Loading

0 comments on commit 6c06489

Please sign in to comment.