.put("hive.s3.staging-directory", "/s3-staging")
.build();
// Expected config object: every setter below must mirror one non-default
// property in the map above so ConfigAssertions.assertFullMapping can verify
// the complete property -> setter mapping. NOTE(review): the start of this
// test method and the earlier .put(...) entries are outside this view —
// presumably each setter here pairs with a .put above; verify against the
// full file.
HiveClientConfig expected = new HiveClientConfig()
.setTimeZone(nonDefaultTimeZone().toTimeZone())
.setMaxSplitSize(new DataSize(256, Unit.MEGABYTE))
.setMaxOutstandingSplits(10)
.setMaxGlobalSplitIteratorThreads(10)
.setMaxSplitIteratorThreads(2)
.setAllowDropTable(true)
.setAllowRenameTable(true)
// Metastore caching / connectivity overrides (all non-default on purpose).
.setMetastoreCacheTtl(new Duration(2, TimeUnit.HOURS))
.setMetastoreRefreshInterval(new Duration(30, TimeUnit.MINUTES))
.setMaxMetastoreRefreshThreads(2500)
.setMetastoreSocksProxy(HostAndPort.fromParts("localhost", 1080))
.setMetastoreTimeout(new Duration(20, TimeUnit.SECONDS))
.setMinPartitionBatchSize(1)
.setMaxPartitionBatchSize(1000)
.setMaxInitialSplits(10)
.setMaxInitialSplitSize(new DataSize(16, Unit.MEGABYTE))
// DFS (HDFS client) settings.
.setDfsTimeout(new Duration(33, TimeUnit.SECONDS))
.setDfsConnectTimeout(new Duration(20, TimeUnit.SECONDS))
.setDfsConnectMaxRetries(10)
.setVerifyChecksum(false)
.setResourceConfigFiles(ImmutableList.of("/foo.xml", "/bar.xml"))
// NOTE(review): hive.storage-format is excluded from this full-mapping
// check — the setter is deliberately commented out. Confirm the matching
// .put entry is also absent above before re-enabling.
// .setHiveStorageFormat(HiveStorageFormat.SEQUENCEFILE)
.setDomainSocketPath("/foo")
// S3 client settings; credentials here are obviously dummy test values.
.setS3AwsAccessKey("abc123")
.setS3AwsSecretKey("secret")
.setS3SslEnabled(false)
.setS3MaxClientRetries(9)
.setS3MaxErrorRetries(8)
.setS3MaxBackoffTime(new Duration(4, TimeUnit.MINUTES))
.setS3ConnectTimeout(new Duration(8, TimeUnit.SECONDS))
.setS3SocketTimeout(new Duration(4, TimeUnit.MINUTES))
.setS3MultipartMinFileSize(new DataSize(32, Unit.MEGABYTE))
.setS3MultipartMinPartSize(new DataSize(15, Unit.MEGABYTE))
.setS3MaxConnections(77)
.setS3StagingDirectory(new File("/s3-staging"));
// Fails if any property is missing a setter, any setter lacks a property,
// or any value does not round-trip through the config class.
ConfigAssertions.assertFullMapping(properties, expected);
}