@Override
public ResultIterator iterator() throws SQLException {
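// One hash cache is built per join id; joinIds, hashExpressions and hashPlans are parallel arrays.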
ImmutableBytesPtr[] joinIds = joinInfo.getJoinIds();
assert (joinIds.length == hashExpressions.length && joinIds.length == hashPlans.length);
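// Client used to run each build-side plan and ship the resulting hash cache to the region servers that will serve the probe-side scan.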
final HashCacheClient hashClient = new HashCacheClient(plan.getContext().getConnection());
Scan scan = plan.getContext().getScan();
final ScanRanges ranges = plan.getContext().getScanRanges();
int count = joinIds.length;
ConnectionQueryServices services = getContext().getConnection().getQueryServices();
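// Build-side plans run in parallel on the connection's executor: futures collects the resulting server caches, and dependencies tracks caches that must be released later in the method (not shown here).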
ExecutorService executor = services.getExecutor();
List<Future<ServerCache>> futures = new ArrayList<Future<ServerCache>>(count);
List<SQLCloseable> dependencies = new ArrayList<SQLCloseable>(count);
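// Server-side hash caches are only kept alive for this interval; a cache older than this may have been evicted before the join scan reaches it.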
final int maxServerCacheTimeToLive = services.getProps().getInt(
        QueryServices.MAX_SERVER_CACHE_TIME_TO_LIVE_MS_ATTRIB,
        QueryServicesOptions.DEFAULT_MAX_SERVER_CACHE_TIME_TO_LIVE_MS);
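// Completion time of the first build-side job, used below to detect jobs slow enough that earlier caches may have expired.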
final AtomicLong firstJobEndTime = new AtomicLong(0);
SQLException firstException = null;
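// Submit one job per join: each executes its hash plan and sends the serialized hash cache to the servers.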
for (int i = 0; i < count; i++) {
final int index = i;
futures.add(executor.submit(new JobCallable<ServerCache>() {
@Override
public ServerCache call() throws Exception {
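// Execute this join's build-side plan and add its results as a hash cache on every region server covered by the scan ranges.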
QueryPlan hashPlan = hashPlans[index];
ServerCache cache = hashClient.addHashCache(ranges, hashPlan.iterator(),
clientProjectors[index], hashPlan.getEstimatedSize(), hashExpressions[index], plan.getTableRef());
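// If this job finished more than the cache TTL after the first one, caches created by earlier jobs may already have expired on the servers.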
long endTime = System.currentTimeMillis();
boolean isSet = firstJobEndTime.compareAndSet(0, endTime);
if (!isSet && (endTime - firstJobEndTime.get()) > maxServerCacheTimeToLive) {
LOG.warn("Hash plan [" + index + "] execution seems too slow. Earlier hash cache(s) might have expired on servers.");