+ FlumeConfiguration.COLLECTOR_DFS_COMPRESS_GZIP
+ " is deprecated, please use "
+ FlumeConfiguration.COLLECTOR_DFS_COMPRESS_CODEC
+ " set to GzipCodec instead");
// Legacy gzip setting: create the file with the codec's default extension
// (".gz") and wrap the raw HDFS stream in a gzip-compressing stream.
CompressionCodec gzipC = new GzipCodec();
Compressor gzCmp = gzipC.createCompressor();
dstPath = new Path(path + gzipC.getDefaultExtension());
hdfs = dstPath.getFileSystem(conf);
writer = hdfs.create(dstPath);
writer = gzipC.createOutputStream(writer, gzCmp);
LOG.info("Creating HDFS gzip compressed file: " + dstPath.toString());
return;
}
// Resolve the configured codec name against every codec class Hadoop's
// CompressionCodecFactory knows about, matching by simple class name.
String codecName = conf.getCollectorDfsCompressCodec();
List<Class<? extends CompressionCodec>> codecs = CompressionCodecFactory
.getCodecClasses(FlumeConfiguration.get());
CompressionCodec codec = null;
// codecStrs is collected only to list the valid choices in the warning
// below; "None" is always an accepted value (meaning no compression).
ArrayList<String> codecStrs = new ArrayList<String>();
codecStrs.add("None");
for (Class<? extends CompressionCodec> cls : codecs) {
codecStrs.add(cls.getSimpleName());
if (cls.getSimpleName().equals(codecName)) {
// NOTE(review): `codec` is still null inside these catch blocks, so the
// error messages print "null" — they should log `cls` instead. Also,
// Class.newInstance() does not call Configurable.setConf(); Hadoop's
// ReflectionUtils.newInstance(cls, conf) is the conventional way to
// instantiate codecs — confirm before changing. Failures are logged and
// swallowed, so execution falls through to the uncompressed path below.
try {
codec = cls.newInstance();
} catch (InstantiationException e) {
LOG.error("Unable to instantiate " + codec + " class");
} catch (IllegalAccessException e) {
LOG.error("Unable to access " + codec + " class");
}
}
}
// No usable codec resolved: create the file uncompressed. Warn only when
// the user asked for a codec we could not find (not for explicit "None").
if (codec == null) {
if (!codecName.equals("None")) {
LOG.warn("Unsupported compression codec " + codecName
+ ". Please choose from: " + codecStrs);
}
dstPath = new Path(path);
hdfs = dstPath.getFileSystem(conf);
writer = hdfs.create(dstPath);
LOG.info("Creating HDFS file: " + dstPath.toString());
return;
}
// Codec resolved: append its default extension to the destination path and
// wrap the HDFS output stream in the codec's compressing stream.
Compressor cmp = codec.createCompressor();
dstPath = new Path(path + codec.getDefaultExtension());
hdfs = dstPath.getFileSystem(conf);
writer = hdfs.create(dstPath);
// NOTE(review): the catch/finally matching this try lies beyond this view.
try {
writer = codec.createOutputStream(writer, cmp);