                // background threads call process().
                crawlQueue.wait ();
            }
        } catch (InterruptedException e) {}

        timer.cancel ();

        for (int i=0; i<worms.length; ++i)
            worms[i].die ();

        if (state == CrawlEvent.PAUSED) {
            // put partly-processed links back in fetchQueue
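The PAUSED branch above puts links that worker threads had claimed but not yet finished back on the fetch queue, so a later resume can issue them again. A minimal sketch of that idea, using plain java.util queues and a String link type rather than the crawler's own Link and queue classes (names here are illustrative, not the crawler's fields):

import java.util.ArrayDeque;
import java.util.Queue;

public class PauseSketch {
    private final Queue<String> fetchQueue = new ArrayDeque<>(); // links waiting to be fetched
    private final Queue<String> inFlight  = new ArrayDeque<>();  // links claimed by worker threads

    // Called when the crawl is paused: requeue partly-processed links.
    public synchronized void pause () {
        String link;
        while ((link = inFlight.poll ()) != null)
            fetchQueue.add (link);   // back of the fetch queue
    }

    public static void main (String[] args) {
        PauseSketch s = new PauseSketch ();
        s.inFlight.add ("http://example.com/a.html");
        s.pause ();
        System.out.println (s.fetchQueue);   // [http://example.com/a.html]
    }
}

Note that a plain FIFO queue discards the original crawl ordering; a priority queue keyed on link priority would preserve it.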
                throw new IOException ("disallowed by Robot Exclusion Standard (robots.txt)");
            page = new Page (w.link, dp);
        } finally {
            timer.cancel ();
        }

        if (w.dead)
            return;
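The IOException above is thrown when the link's host forbids crawling under the Robot Exclusion Standard. A simplified, self-contained illustration of such a check (not the crawler's own robot-exclusion class): fetch /robots.txt from the link's host and refuse the URL if a Disallow rule in the "User-agent: *" section matches its path. A production check would cache the rules per host and honour agent-specific sections, which this sketch omits.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;

public class RobotsCheck {

    // Returns true if robots.txt disallows the given URL for all agents.
    static boolean disallowed (URL url) throws IOException {
        URL robots = new URL (url.getProtocol (), url.getHost (), url.getPort (), "/robots.txt");
        boolean applies = false;    // inside a "User-agent: *" section?
        try (BufferedReader in = new BufferedReader (
                new InputStreamReader (robots.openStream ()))) {
            String line;
            while ((line = in.readLine ()) != null) {
                line = line.trim ();
                if (line.regionMatches (true, 0, "User-agent:", 0, 11)) {
                    applies = line.substring (11).trim ().equals ("*");
                } else if (applies && line.regionMatches (true, 0, "Disallow:", 0, 9)) {
                    String path = line.substring (9).trim ();
                    if (!path.isEmpty () && url.getPath ().startsWith (path))
                        return true;
                }
            }
        } catch (java.io.FileNotFoundException e) {
            return false;           // no robots.txt: everything is allowed
        }
        return false;
    }

    public static void main (String[] args) throws IOException {
        URL u = new URL ("http://example.com/private/page.html");
        if (disallowed (u))
            throw new IOException ("disallowed by Robot Exclusion Standard (robots.txt)");
        System.out.println ("allowed: " + u);
    }
}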