Table tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, false);
Partition part = null;
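// Resolve the target table (and partition, if a partition spec was given).
// If either is missing, an error message is written to the result file and
// the task returns 0 so the caller can surface the message to the user.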
try {
  if (tbl == null) {
    // table not found: write an error message to the result file and return
    FileSystem fs = descTbl.getResFile().getFileSystem(conf);
    DataOutput outStream = (DataOutput) fs.create(descTbl.getResFile());
    String errMsg = "Table " + tableName + " does not exist";
    outStream.write(errMsg.getBytes("UTF-8"));
    ((FSDataOutputStream) outStream).close();
    return 0;
  }
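  // if a partition was specified, it must exist before it can be described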
  if (descTbl.getPartSpec() != null) {
    part = db.getPartition(tbl, descTbl.getPartSpec(), false);
    if (part == null) {
      // partition not found: write an error message to the result file and return
      FileSystem fs = descTbl.getResFile().getFileSystem(conf);
      DataOutput outStream = (DataOutput) fs.create(descTbl.getResFile());
      String errMsg = "Partition " + descTbl.getPartSpec() + " for table "
          + tableName + " does not exist";
      outStream.write(errMsg.getBytes("UTF-8"));
      ((FSDataOutputStream) outStream).close();
      return 0;
    }
    tbl = part.getTable();
  }
} catch (FileNotFoundException e) {
  LOG.info("describe table: " + stringifyException(e));
  return 1;
} catch (IOException e) {
  LOG.info("describe table: " + stringifyException(e));
  return 1;
}
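// Write the describe output: one row per column (name, type, comment),
// followed by the partition columns and, for an extended describe, the full
// table or partition metadata.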
try {
LOG.info("DDLTask: got data for " + tbl.getName());
List<FieldSchema> cols = null;
if (colPath.equals(tableName)) {
cols = tbl.getCols();
if (part != null) {
cols = part.getTPartition().getSd().getCols();
}
} else {
cols = Hive.getFieldsFromDeserializer(colPath, tbl.getDeserializer());
}
FileSystem fs = descTbl.getResFile().getFileSystem(conf);
DataOutput outStream = (DataOutput)fs.create(descTbl.getResFile());
Iterator<FieldSchema> iterCols = cols.iterator();
while (iterCols.hasNext()) {
// create a row per column
FieldSchema col = iterCols.next();
outStream.writeBytes(col.getName());
outStream.write(separator);
outStream.writeBytes(col.getType());
outStream.write(separator);
outStream.writeBytes(col.getComment() == null ? "" : col.getComment());
outStream.write(terminator);
}
  if (tableName.equals(colPath)) {
    // also return the partitioning columns
    List<FieldSchema> partCols = tbl.getPartCols();
    Iterator<FieldSchema> iterPartCols = partCols.iterator();
    while (iterPartCols.hasNext()) {
      FieldSchema col = iterPartCols.next();
      outStream.writeBytes(col.getName());
      outStream.write(separator);
      outStream.writeBytes(col.getType());
      outStream.write(separator);
      outStream.writeBytes(col.getComment() == null ? "" : col.getComment());
      outStream.write(terminator);
    }

    // if extended desc table then show the complete details of the table
    if (descTbl.isExt()) {
      // add empty line
      outStream.write(terminator);
      if (part != null) {
        // show partition information
        outStream.writeBytes("Detailed Partition Information");
        outStream.write(separator);
        outStream.writeBytes(part.getTPartition().toString());
        outStream.write(separator);
        // comment column is empty
        outStream.write(terminator);
      } else {
        // show table information
        outStream.writeBytes("Detailed Table Information");
        outStream.write(separator);
        outStream.writeBytes(tbl.getTTable().toString());
        outStream.write(separator);
        // comment column is empty
        outStream.write(terminator);
      }
    }
  }

  LOG.info("DDLTask: written data for " + tbl.getName());