public void run() {
    try {
        log.info("crawling url: " + urlToCrawl.link());
        Page page = downloader.get(urlToCrawl.link());

        // Hand the result to the visitor: error responses and successful pages use different callbacks.
        if (page.getStatusCode() != Status.OK) {
            visitor.onError(urlToCrawl, page.getStatusCode());
        } else {
            visitor.visit(page);
        }

        // Schedule a new crawl task, one level deeper, for every link the visitor agrees to follow.
        for (String l : page.getLinks()) {
            String link = normalizer.normalize(l);
            final Url url = new Url(link, urlToCrawl.depth() + 1);
            if (visitor.followUrl(url)) {
                executor.execute(new PageCrawlerExecutor(url, executor, counter, downloader, normalizer, visitor));
            }
        }
    } catch (Exception e) {
        // Assumption: unexpected failures are logged and the task simply ends,
        // so a single bad page cannot take down the whole crawl.
        log.info("failed to crawl url: " + urlToCrawl.link() + " - " + e.getMessage());
    }
}
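// The pattern above -- each task re-submits follow-up tasks to the same executor --
// needs some shared bookkeeping (the "counter" passed around) so the caller can tell
// when the whole crawl has finished. Below is a minimal, self-contained sketch of that
// pattern, assuming a Phaser as the completion tracker; every name in it is illustrative
// and not part of the crawler's API.
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Phaser;

class RecursiveSubmissionDemo implements Runnable {
    private final String item;
    private final int depth;
    private final ExecutorService executor;
    private final Phaser outstanding;

    RecursiveSubmissionDemo(String item, int depth, ExecutorService executor, Phaser outstanding) {
        this.item = item;
        this.depth = depth;
        this.executor = executor;
        this.outstanding = outstanding;
        outstanding.register(); // one registration per task, made before the task is submitted
    }

    @Override
    public void run() {
        try {
            System.out.println("processing " + item + " at depth " + depth);
            if (depth < 2) { // stand-in for visitor.followUrl(...)
                for (String child : List.of(item + "/a", item + "/b")) {
                    executor.execute(new RecursiveSubmissionDemo(child, depth + 1, executor, outstanding));
                }
            }
        } finally {
            outstanding.arriveAndDeregister(); // this task is done, whether it succeeded or not
        }
    }

    public static void main(String[] args) {
        ExecutorService executor = Executors.newFixedThreadPool(4);
        Phaser outstanding = new Phaser(1); // the extra party is the main thread itself
        executor.execute(new RecursiveSubmissionDemo("root", 0, executor, outstanding));
        outstanding.arriveAndAwaitAdvance(); // blocks until every submitted task has finished
        executor.shutdown();
    }
}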