// NOTE(review): fragment starts mid-method — the try/catch that produced e1 opens before this view.
throw new RuntimeException(e1);
}
// Row key for this document's metadata: hash(indexName + delimiter + docId).
ByteBuffer key = CassandraUtils.hashKeyBytes(indexNameBytes, CassandraUtils.delimeterBytes, docId);
// Fetch only the single document-metadata column for that key.
ReadCommand rc = new SliceByNamesReadCommand(CassandraUtils.keySpace, key, CassandraUtils.metaColumnPath,
Arrays.asList(CassandraUtils.documentMetaFieldBytes));
List<Row> rows = null;
try
{
rows = CassandraUtils.robustRead(CassandraUtils.consistency, rc);
if (rows.isEmpty())
{
return; // this docId is missing
}
// Deserialize the Thrift-encoded term list stored in the metadata column.
DocumentMetadata allTerms = IndexWriter.fromBytesUsingThrift(rows.get(0).cf.getColumn(
CassandraUtils.documentMetaFieldBytes).value());
// Build one SliceByNames read per matching term, then issue them as a single batch below.
List<ReadCommand> readCommands = new ArrayList<ReadCommand>();
for (ThriftTerm t : allTerms.getTerms())
{
// skip the ones not of this field
if (!t.getField().equals(field))
continue;
// add to multiget params
try
{
// Term-vector row key: hash(indexName + delimiter + field + delimiter + termText).
// `key` is deliberately reused here; the metadata key above is no longer needed.
key = CassandraUtils.hashKeyBytes(indexName.getBytes("UTF-8"), CassandraUtils.delimeterBytes, t.getField()
.getBytes("UTF-8"), CassandraUtils.delimeterBytes, t.getText());
}
catch (UnsupportedEncodingException e)
{
// UTF-8 is mandatory in every JVM, so this path is effectively unreachable.
throw new RuntimeException("JVM doesn't support UTF-8", e);
}
// Column name is the vint-encoded docI — presumably the numeric document id.
// NOTE(review): confirm `docI` (declared outside this view) is intended here rather than `docId` used above.
readCommands.add(new SliceByNamesReadCommand(CassandraUtils.keySpace, key, new ColumnParent()
.setColumn_family(CassandraUtils.termVecColumnFamily), Arrays.asList(ByteBuffer
.wrap(CassandraUtils.writeVInt(docI)))));
}
// Batch-read all term-vector columns for this document in one multiget.
rows = CassandraUtils.robustRead(CassandraUtils.consistency, readCommands.toArray(new ReadCommand[] {}));