String sig = parseData.getContentMeta().get(Nutch.SIGNATURE_KEY);
if (sig != null) {
  byte[] signature = StringUtil.fromHexString(sig);
  if (signature != null) {
    // append a CrawlDatum with a signature
    CrawlDatum d = new CrawlDatum(CrawlDatum.STATUS_SIGNATURE, 0);
    d.setSignature(signature);
    crawlOut.append(key, d);
  }
}
// see if the parse metadata contains anything we'd like to pass on
// to the metadata of the CrawlDb entry
CrawlDatum parseMDCrawlDatum = null;
for (String mdname : parseMDtoCrawlDB) {
  String mdvalue = parse.getData().getParseMeta().get(mdname);
  if (mdvalue != null) {
    if (parseMDCrawlDatum == null) {
      parseMDCrawlDatum = new CrawlDatum(CrawlDatum.STATUS_PARSE_META, 0);
    }
    parseMDCrawlDatum.getMetaData().put(new Text(mdname), new Text(mdvalue));
  }
}
if (parseMDCrawlDatum != null) {
  crawlOut.append(key, parseMDCrawlDatum);
}
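
// determine the source host up front: filterNormalize() uses it to drop
// outlinks pointing at other hosts when external links are ignored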
if (ignoreExternalLinks) {
  // need to determine fromHost (once for all outlinks)
  try {
    fromHost = new URL(fromUrl).getHost().toLowerCase();
  } catch (MalformedURLException e) {
    fromHost = null;
  }
} else {
  fromHost = null;
}
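
// a successful parse may still signal a redirect (e.g. an HTML meta
// refresh); register the redirect target as a linked URL and record a
// representative URL (URLUtil.chooseRepr) when it differs from the target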
ParseStatus pstatus = parseData.getStatus();
if (pstatus != null && pstatus.isSuccess()
    && pstatus.getMinorCode() == ParseStatus.SUCCESS_REDIRECT) {
  String newUrl = pstatus.getMessage();
  int refreshTime = Integer.valueOf(pstatus.getArgs()[1]);
  newUrl = filterNormalize(fromUrl, newUrl, fromHost, ignoreExternalLinks,
      filters, normalizers, URLNormalizers.SCOPE_FETCHER);
  if (newUrl != null) {
    String reprUrl = URLUtil.chooseRepr(fromUrl, newUrl,
        refreshTime < Fetcher.PERM_REFRESH_TIME);
    CrawlDatum newDatum = new CrawlDatum();
    newDatum.setStatus(CrawlDatum.STATUS_LINKED);
    if (reprUrl != null && !reprUrl.equals(newUrl)) {
      newDatum.getMetaData().put(Nutch.WRITABLE_REPR_URL_KEY,
          new Text(reprUrl));
    }
    crawlOut.append(new Text(newUrl), newDatum);
  }
}
// collect outlinks for subsequent db update; at most maxOutlinks are kept
Outlink[] links = parseData.getOutlinks();
int outlinksToStore = Math.min(maxOutlinks, links.length);
int validCount = 0;
CrawlDatum adjust = null;
List<Entry<Text, CrawlDatum>> targets =
    new ArrayList<Entry<Text, CrawlDatum>>(outlinksToStore);
List<Outlink> outlinkList = new ArrayList<Outlink>(outlinksToStore);
for (int i = 0; i < links.length && validCount < outlinksToStore; i++) {
  String toUrl = links[i].getToUrl();
  // only normalize and filter if fetcher.parse = false
  if (!isParsing) {
    toUrl = ParseOutputFormat.filterNormalize(fromUrl, toUrl, fromHost,
        ignoreExternalLinks, filters, normalizers);
    if (toUrl == null) {
      continue;
    }
  }
  CrawlDatum target = new CrawlDatum(CrawlDatum.STATUS_LINKED, interval);
  Text targetUrl = new Text(toUrl);
  // see if the outlink has any metadata attached and, if so, pass it on
  // to the CrawlDatum so that the initial score or distribution can use it
  MapWritable outlinkMD = links[i].getMetadata();
  if (outlinkMD != null) {
    target.getMetaData().putAll(outlinkMD);
  }
  try {
    scfilters.initialScore(targetUrl, target);
  } catch (ScoringFilterException e) {
    LOG.warn("Cannot filter init score for url " + key + ", using default: "
        + e.getMessage());
    target.setScore(0.0f);
  }
  targets.add(new SimpleEntry(targetUrl, target));
  // overwrite the URL in the Outlink object with the normalized URL
  // (NUTCH-1174)
  links[i].setUrl(toUrl);
  outlinkList.add(links[i]);
  validCount++;
}
try {
  // compute score contributions and adjustment to the original score
  adjust = scfilters.distributeScoreToOutlinks(key, parseData, targets,
      null, links.length);
} catch (ScoringFilterException e) {
  LOG.warn("Cannot distribute score from " + key + ": " + e.getMessage());
}
for (Entry<Text, CrawlDatum> target : targets) {
  crawlOut.append(target.getKey(), target.getValue());
}
if (adjust != null) {
  crawlOut.append(key, adjust);
}
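
// rebuild the ParseData so that only the filtered, normalized outlinks
// are written to the parse data output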
Outlink[] filteredLinks = outlinkList.toArray(new Outlink[outlinkList.size()]);
parseData = new ParseData(parseData.getStatus(), parseData.getTitle(),
    filteredLinks, parseData.getContentMeta(), parseData.getParseMeta());
dataOut.append(key, parseData);
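
// a non-canonical parse (a parse plugin may return several parses for a
// single fetched document) gets its own FETCH_SUCCESS entry carrying the
// fetch time of the original document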
if (!parse.isCanonical()) {
  CrawlDatum datum = new CrawlDatum();
  datum.setStatus(CrawlDatum.STATUS_FETCH_SUCCESS);
  String timeString = parse.getData().getContentMeta().get(Nutch.FETCH_TIME_KEY);
  try {
    datum.setFetchTime(Long.parseLong(timeString));
  } catch (Exception e) {
    LOG.warn("Can't read fetch time for: " + key);
    datum.setFetchTime(System.currentTimeMillis());
  }
  crawlOut.append(key, datum);
}
}