    return null;
  }
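
  /**
   * Generate the map-reduce tasks (or a plain fetch task) for the given query block.
   * Select-star queries over an unpartitioned table without a WHERE clause, or over
   * partitions that can be pruned entirely at compile time, are answered with a
   * fetch task so that no map-reduce job is launched.
   */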
  @SuppressWarnings("nls")
  private void genMapRedTasks(QB qb) throws SemanticException {
    fetchWork fetch = null;
    List<Task<? extends Serializable>> mvTask = new ArrayList<Task<? extends Serializable>>();
    Task<? extends Serializable> fetchTask = null;

    QBParseInfo qbParseInfo = qb.getParseInfo();

    // Does this query need a reduce job?
    if (qb.isSelectStarQuery()
        && qbParseInfo.getDestToClusterBy().isEmpty()
        && qbParseInfo.getDestToDistributeBy().isEmpty()
        && qbParseInfo.getDestToOrderBy().isEmpty()
        && qbParseInfo.getDestToSortBy().isEmpty()) {
      boolean noMapRed = false;
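
      // The select-star fast path only looks at the first (expected to be the only)
      // source table from the alias-to-table map.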
      Iterator<Map.Entry<String, Table>> iter =
          qb.getMetaData().getAliasToTable().entrySet().iterator();
      Table tab = iter.next().getValue();
      if (!tab.isPartitioned()) {
        if (qbParseInfo.getDestToWhereExpr().isEmpty()) {
          fetch = new fetchWork(tab.getPath().toString(), Utilities.getTableDesc(tab),
              qb.getParseInfo().getOuterQueryLimit());
          noMapRed = true;
          inputs.add(new ReadEntity(tab));
        }
      } else {
        if (topOps.size() == 1) {
          TableScanOperator ts = (TableScanOperator) topOps.values().toArray()[0];

          // check if the pruner only contains partition columns
          if (PartitionPruner.onlyContainsPartnCols(topToTable.get(ts), opToPartPruner.get(ts))) {
            PrunedPartitionList partsList = null;
            try {
              partsList = PartitionPruner.prune(topToTable.get(ts), opToPartPruner.get(ts), conf,
                  (String) topOps.keySet().toArray()[0], prunedPartitions);
            } catch (HiveException e) {
              // Has to use full name to make sure it does not conflict with
              // org.apache.commons.lang.StringUtils
              LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
              throw new SemanticException(e.getMessage(), e);
            }

            // If there is any unknown partition, create a map-reduce job for the
            // filter to prune correctly
            if (partsList.getUnknownPartns().size() == 0) {
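              // All partitions are known at compile time, so collect their paths and
              // descriptors for a plain fetch over the pruned partitions.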
              List<String> listP = new ArrayList<String>();
              List<partitionDesc> partP = new ArrayList<partitionDesc>();

              Set<Partition> parts = partsList.getConfirmedPartns();
              Iterator<Partition> iterParts = parts.iterator();
              while (iterParts.hasNext()) {
                Partition part = iterParts.next();
                listP.add(part.getPartitionPath().toString());
                try {
                  partP.add(Utilities.getPartitionDesc(part));
                } catch (HiveException e) {
                  throw new SemanticException(e.getMessage(), e);
                }
                inputs.add(new ReadEntity(part));
              }

              fetch = new fetchWork(listP, partP, qb.getParseInfo().getOuterQueryLimit());
              noMapRed = true;
            }
          }
        }
      }
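
      // The whole query can be answered by scanning the input directly, so replace
      // the plan with a single fetch task.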
      if (noMapRed) {
        fetchTask = TaskFactory.get(fetch, this.conf);
        setFetchTask(fetchTask);

        // remove root tasks if any
        rootTasks.clear();
        return;
      }
    }

    // In case of a select, use a fetch task instead of a move task
    if (qb.getIsQuery()) {
      if ((!loadTableWork.isEmpty()) || (loadFileWork.size() != 1)) {
        throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg());
      }
      String cols = loadFileWork.get(0).getColumns();
      String colTypes = loadFileWork.get(0).getColumnTypes();
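
      // Read the query results straight out of the final output directory with a
      // text-format fetch (ctrl-A separated rows deserialized by LazySimpleSerDe).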
      fetch = new fetchWork(new Path(loadFileWork.get(0).getSourceDir()).toString(),
          new tableDesc(LazySimpleSerDe.class, TextInputFormat.class,
              IgnoreKeyTextOutputFormat.class,
              Utilities.makeProperties(
                  org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "" + Utilities.ctrlaCode,
                  org.apache.hadoop.hive.serde.Constants.LIST_COLUMNS, cols,