// A record could be written to multiple locations within the Hive, but the
// default implementation registers a single copy under its partition key.
hive.directory().insertPrimaryIndexKey(spork.getType());
// Resolve the DAO(s) for the data node(s) assigned to this partition key,
// then execute the insert against each of them.
Collection<SimpleJdbcDaoSupport> targetDaos =
    hive.connection().daoSupport().get(spork.getType(), AccessType.ReadWrite);
PreparedStatementCreatorFactory insertFactory =
    new PreparedStatementCreatorFactory(productInsertSql, new int[]{Types.INTEGER, Types.VARCHAR, Types.VARCHAR});
Object[] insertArgs = {spork.getId(), spork.getName(), spork.getType()};
for (JdbcDaoSupport targetDao : targetDaos) {
    targetDao.getJdbcTemplate().update(insertFactory.newPreparedStatementCreator(insertArgs));
}
// Record the resource id in the directory so the hive can locate the record later.
hive.directory().insertResourceId(resourceName, spork.getId(), spork.getType());
// Finally, register the secondary-index entry keyed on the record's name.
hive.directory().insertSecondaryIndexKey(resourceName, "name", spork.getName(), spork.getId());