String inputFormat = null, outputFormat = null;
tableName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
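
// Walk the remaining children of the CREATE TABLE AST (child 0 is the table name)
// and validate each clause against what Howl supports.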
for (int num = 1; num < numCh; num++) {
  ASTNode child = (ASTNode) ast.getChild(num);

  switch (child.getToken().getType()) {
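
  // Constructs Howl rejects outright: CTAS, CLUSTERED BY, and the default
  // SequenceFile/TextFile storage formats.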
  case HiveParser.TOK_QUERY: // CTAS
    throw new SemanticException("Operation not supported. Create table as Select is not a valid operation.");

  case HiveParser.TOK_TABLEBUCKETS:
    throw new SemanticException("Operation not supported. Howl doesn't allow Clustered By in create table.");

  case HiveParser.TOK_TBLSEQUENCEFILE:
    throw new SemanticException("Operation not supported. Howl doesn't support Sequence File by default yet. " +
        "You may specify it through INPUT/OUTPUT storage drivers.");

  case HiveParser.TOK_TBLTEXTFILE:
    throw new SemanticException("Operation not supported. Howl doesn't support Text File by default yet. " +
        "You may specify it through INPUT/OUTPUT storage drivers.");
  case HiveParser.TOK_LIKETABLE:
    if (child.getChildCount() > 0) {
      String likeTableName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
      if (likeTableName != null) {
        throw new SemanticException("Operation not supported. CREATE TABLE LIKE is not supported.");
        // Map<String, String> tblProps;
        // try {
        //   tblProps = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, likeTableName).getParameters();
        // } catch (HiveException he) {
        //   throw new SemanticException(he);
        // }
        // if (!(tblProps.containsKey(InitializeInput.HOWL_ISD_CLASS) && tblProps.containsKey(InitializeInput.HOWL_OSD_CLASS))) {
        //   throw new SemanticException("Operation not supported. Table " + likeTableName + " should have been created through Howl. Seems like its not.");
        // }
        // return ast;
      }
    }
    break;
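
  // IF NOT EXISTS: when the table already exists, stop here and return null.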
  case HiveParser.TOK_IFNOTEXISTS:
    try {
      List<String> tables = db.getTablesByPattern(tableName);
      if (tables != null && tables.size() > 0) { // table exists
        return null;
      }
    } catch (HiveException e) {
      throw new SemanticException(e);
    }
    break;
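
  // Howl only allows partition columns of type string; check every declared partition column.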
  case HiveParser.TOK_TABLEPARTCOLS:
    List<FieldSchema> partCols = BaseSemanticAnalyzer.getColumns((ASTNode) child.getChild(0), false);
    for (FieldSchema fs : partCols) {
      if (!fs.getType().equalsIgnoreCase("string")) {
        throw new SemanticException("Operation not supported. Howl only supports partition columns of type string. " +
            "For column: " + fs.getName() + " Found type: " + fs.getType());
      }
    }
    break;
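
  // Explicit file format clause: all four of InputFormat, OutputFormat,
  // InputDriver and OutputDriver must be supplied.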
  case HiveParser.TOK_TABLEFILEFORMAT:
    if (child.getChildCount() < 4) {
      throw new SemanticException("Incomplete specification of File Format. " +
          "You must provide InputFormat, OutputFormat, InputDriver, OutputDriver.");
    }
    inputFormat = BaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
    outputFormat = BaseSemanticAnalyzer.unescapeSQLString(child.getChild(1).getText());
    inStorageDriver = BaseSemanticAnalyzer.unescapeSQLString(child.getChild(2).getText());
    outStorageDriver = BaseSemanticAnalyzer.unescapeSQLString(child.getChild(3).getText());
    break;
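
  // STORED AS RCFILE maps directly onto the RCFile input/output format classes.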
  case HiveParser.TOK_TBLRCFILE:
    inputFormat = RCFileInputFormat.class.getName();
    outputFormat = RCFileOutputFormat.class.getName();