client.dropDatabase(dbName);
boolean ret = client.createDatabase(dbName, "strange_loc");
assertTrue("Unable to create the databse " + dbName, ret);
ArrayList<FieldSchema> invCols = new ArrayList<FieldSchema>(2);
invCols.add(new FieldSchema("n-ame", Constants.STRING_TYPE_NAME, ""));
invCols.add(new FieldSchema("in.come", Constants.INT_TYPE_NAME, ""));
Table tbl = new Table();
tbl.setDbName(dbName);
tbl.setTableName(invTblName);
StorageDescriptor sd = new StorageDescriptor();
tbl.setSd(sd);
sd.setCols(invCols);
sd.setCompressed(false);
sd.setNumBuckets(1);
sd.setParameters(new HashMap<String, String>());
sd.getParameters().put("test_param_1", "Use this for comments etc");
sd.setBucketCols(new ArrayList<String>(2));
sd.getBucketCols().add("name");
sd.setSerdeInfo(new SerDeInfo());
sd.getSerdeInfo().setName(tbl.getTableName());
sd.getSerdeInfo().setParameters(new HashMap<String, String>());
sd.getSerdeInfo().getParameters().put(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
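// creating this table should be rejected with an InvalidObjectException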
boolean failed = false;
try {
  client.createTable(tbl);
} catch (InvalidObjectException ex) {
  failed = true;
}
if (!failed) {
  fail("Able to create table with invalid name: " + invTblName);
}
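// build valid replacement columns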
ArrayList<FieldSchema> cols = new ArrayList<FieldSchema>(2);
cols.add(new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
cols.add(new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
// create a valid table
tbl.setTableName(tblName);
tbl.getSd().setCols(cols);
client.createTable(tbl);