}
// drop the table
db.dropTable(dropTbl.getTableName());
// Register the dropped table as a write-entity so downstream hooks/lineage
// see this operation's output. Guarded because tbl may be null (table may
// not exist / not have been resolved earlier in this method).
if (tbl != null) {
work.getOutputs().add(new WriteEntity(tbl));
}
} else {
// This is actually an ALTER TABLE DROP PARTITION
List<Partition> partsToDelete = new ArrayList<Partition>();
// Resolve each partition spec from the command into concrete Partition
// objects, validating each batch before any partition is dropped.
for (PartitionSpec partSpec : dropTbl.getPartSpecs()) {
List<Partition> partitions = null;
// getPartitionsByFilter only works for string columns.
// Till that is fixed, only equality will work for non-string columns.
if (dropTbl.isStringPartitionColumns()) {
try {
// Filter-based lookup supports comparison operators (e.g. <, >=)
// in the spec; cause is preserved in the rethrown HiveException.
partitions = db.getPartitionsByFilter(tbl, partSpec.toString());
} catch (Exception e) {
throw new HiveException(e);
}
}
else {
// Non-string partition columns: fall back to exact-match lookup,
// stripping any comparison operators from the spec.
partitions = db.getPartitions(tbl, partSpec.getPartSpecWithoutOperator());
}
// this is to prevent dropping archived partition which is archived in a
// different level the drop command specified.
// partPrefixToDrop = number of leading partition columns the user's spec
// actually constrains (the loop stops at the first unspecified column).
int partPrefixToDrop = 0;
for (FieldSchema fs : tbl.getPartCols()) {
if (partSpec.existsKey(fs.getName())) {
partPrefixToDrop += 1;
} else {
break;
}
}
// Unless protection checks are explicitly bypassed, verify every matched
// partition may be dropped before committing to drop any of them.
if (!dropTbl.getIgnoreProtection()) {
for (Partition p : partitions) {
if (!p.canDrop()) {
throw new HiveException("Table " + tbl.getTableName()
+ " Partition " + p.getName()
+ " is protected from being dropped");
} else if (ArchiveUtils.isArchived(p)) {
// NOTE(review): "partAchiveLevel" is a typo for "partArchiveLevel";
// left unchanged here since this edit only adds comments.
int partAchiveLevel = ArchiveUtils.getArchivingLevel(p);
// trying to drop partitions inside a har, disallow it.
// A spec deeper than the archiving level would drop only part of
// an archive (har), which is not supported.
if (partAchiveLevel < partPrefixToDrop) {
throw new HiveException(
"Cannot drop a subset of partitions in an archive, partition "
+ p.getName());
}
}
}
}
partsToDelete.addAll(partitions);
}
// drop all existing partitions from the list
// All validation passed; now perform the actual drops and record each
// dropped partition as a write-entity for hooks/lineage.
for (Partition partition : partsToDelete) {
console.printInfo("Dropping the partition " + partition.getName());
// NOTE(review): deleteData is hard-coded to true — partition data is
// always removed from the filesystem here; confirm this matches the
// intended EXTERNAL-table / PURGE semantics for this code path.
db.dropPartition(dropTbl.getTableName(), partition.getValues(), true);
work.getOutputs().add(new WriteEntity(partition));
}
}
return 0;
}