// Robots.txt could not be fetched before the timeout: fail closed.
if (getRobotTimedOut()) {
    throw new RobotTimedOutAccessControlException("Unable to check" +
            " robots.txt for " + requestUrl);
}
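// The live web is unreachable, so robots.txt could not be retrieved at all.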
if (getLiveWebGone()) {
    throw new RobotNotAvailableException("Unable to retrieve robots.txt" +
            " for " + requestUrl);
}
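// Robots.txt was retrieved and this URL failed its rules: access is denied.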
if (isSawRobots() && !isPassedRobots()) {
    throw new RobotAccessControlException("The URL " + requestUrl +
            " is blocked by the site's robots.txt file");
}