}
@Override
public Value<BigQueryLoadJobReference> run(BigQueryLoadJobReference pollResult,
    Integer numRetries) throws Exception {
  // Re-fetch the current state of the BigQuery load job referenced by the poll result.
  Job job = BigQueryLoadGoogleCloudStorageFilesJob.getBigquery().jobs()
      .get(pollResult.getJobReference().getProjectId(),
          pollResult.getJobReference().getJobId())
      .execute();

  ErrorProto errorResult = job.getStatus().getErrorResult();
  List<ErrorProto> nonFatalErrors = job.getStatus().getErrors();

  // A non-null errorResult means the load job failed outright: log the failure and
  // enqueue a fresh load attempt with the retry counter bumped.
  if (errorResult != null) {
    log.severe("Job failed while writing to Bigquery. Retrying...#attempt " + numRetries
        + " Error details : " + errorResult.getReason() + ": " + errorResult.getMessage()
        + " at " + errorResult.getLocation());
    return futureCall(
        new BigQueryLoadFileSetJob(dataset, tableName, projectId, bundle, schema),
        immediate(numRetries + 1));
  }

  // Non-fatal errors: the job still ran to completion, but each error is worth surfacing.
  if (nonFatalErrors != null) {
    log.log(Level.SEVERE, "Bigquery load job for files " + bundle
        + " completed with following errors. Bigquery does not consider these errors fatal. Hence the job went to completion.");
    for (ErrorProto error : nonFatalErrors) {
      log.log(Level.SEVERE, "Error: [REASON] " + error.getReason() + " [MESSAGE] "
          + error.getMessage() + " [LOCATION] " + error.getLocation());
    }
  }

  // Delete the staged input files, then report the job as DONE once deletion completes.
  FutureValue<Void> deleteFiles = futureCall(new DeleteFilesJob(), immediate(bundle));
  return futureCall(new ReturnResult<BigQueryLoadJobReference>(),
      immediate(new BigQueryLoadJobReference("DONE", job.getJobReference())),
      waitFor(deleteFiles));
}