import org.apache.hadoop.hive.common.type.HiveVarchar;
import java.io.*;
public class TestHiveVarcharWritable extends TestCase {
public void testStringLength() throws Exception {
  // getCharacterLength() must always reflect the writable's current value.
  HiveVarcharWritable writable = new HiveVarcharWritable(new HiveVarchar("0123456789", 10));
  assertEquals(10, writable.getCharacterLength());

  // Every flavor of set() should update the reported character length.
  writable.set("012345678901234");
  assertEquals(15, writable.getCharacterLength());

  writable.set(new HiveVarcharWritable(new HiveVarchar("01234", -1)));
  assertEquals(5, writable.getCharacterLength());

  writable.set(new HiveVarchar("012345", -1));
  assertEquals(6, writable.getCharacterLength());

  writable.set("0123456", -1);
  assertEquals(7, writable.getCharacterLength());

  writable.set(new HiveVarcharWritable(new HiveVarchar("01234567", -1)), -1);
  assertEquals(8, writable.getCharacterLength());

  // Truncation via enforceMaxLength() should be reflected as well.
  writable.enforceMaxLength(3);
  assertEquals(3, writable.getCharacterLength());

  // A write()/readFields() round trip should carry the length across.
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  HiveVarcharWritable source = new HiveVarcharWritable(new HiveVarchar("abcdef", -1));
  source.write(new DataOutputStream(buffer));
  writable.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));
  assertEquals(6, writable.getCharacterLength());
}