}
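// Per-index settings: whether feature extraction may run on the generic thread
// pool, and whether metadata parse failures should be ignored rather than
// failing the whole document.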
final boolean useThreadPool = settings.getAsBoolean("index.image.use_thread_pool", true);
final boolean ignoreMetadataError = settings.getAsBoolean("index.image.ignore_metadata_error", true);
BufferedImage img = ImageIO.read(new BytesStreamInput(content, false));
if (img == null) {  // ImageIO.read returns null for unrecognized formats
    throw new ElasticsearchImageProcessException("Failed to decode image: unrecognized or corrupt image data");
}
// Downscale oversized images so feature extraction cost stays bounded.
if (Math.max(img.getHeight(), img.getWidth()) > MAX_IMAGE_DIMENSION) {
    img = ImageUtils.scaleImage(img, MAX_IMAGE_DIMENSION);
}
// Effectively-final copy so the anonymous Runnables below can capture the image.
final BufferedImage finalImg = img;
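// Guava's MapMaker().makeMap() returns a ConcurrentMap, so worker threads can
// publish extracted features without extra synchronization.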
final Map<FeatureEnum, LireFeature> featureExtractMap = new MapMaker().makeMap();
// When more than one feature is configured, extract them in parallel on the generic thread pool.
if (useThreadPool && features.size() > 1) {
    final CountDownLatch latch = new CountDownLatch(features.size());
    Executor executor = threadPool.generic();
    for (ObjectObjectCursor<FeatureEnum, Map<String, Object>> cursor : features) {
        final FeatureEnum featureEnum = cursor.key;
        executor.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    LireFeature lireFeature = featureEnum.getFeatureClass().newInstance();
                    lireFeature.extract(finalImg);
                    featureExtractMap.put(featureEnum, lireFeature);
                } catch (Throwable e) {
                    // Catch everything so one failing feature cannot leave the latch hanging.
                    logger.error("Failed to extract feature from image", e);
                } finally {
                    latch.countDown();
                }
            }
        });
    }
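    // Wait for every extraction task to finish; failures were already logged
    // by the tasks themselves.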
    try {
        latch.await();
    } catch (InterruptedException e) {
        logger.debug("Interrupted while extracting features from image", e);
        Thread.currentThread().interrupt();
    }
}
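// Index each configured feature, reusing results computed by the thread pool
// when available and extracting on the calling thread otherwise.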
for (ObjectObjectCursor<FeatureEnum, Map<String, Object>> cursor : features) {
    FeatureEnum featureEnum = cursor.key;
    Map<String, Object> featureMap = cursor.value;
    try {
        LireFeature lireFeature;
        if (featureExtractMap.containsKey(featureEnum)) {  // already extracted by the thread pool
            lireFeature = featureExtractMap.get(featureEnum);
        } else {
            lireFeature = featureEnum.getFeatureClass().newInstance();
            lireFeature.extract(img);
        }
        byte[] parsedContent = lireFeature.getByteArrayRepresentation();
        Mapper featureMapper = featureMappers.get(featureEnum.name());
        context.externalValue(parsedContent);
        featureMapper.parse(context);
        // Also store the raw feature bytes as doc values so they can be read back for scoring.
        context.doc().add(new BinaryDocValuesField(name() + "." + featureEnum.name(), new BytesRef(parsedContent)));
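        // Hashes (if requested) map the feature's double histogram onto a small set
        // of integer terms, so similar images can be found with ordinary term queries
        // instead of exhaustive distance computation over all documents.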
        if (featureMap.containsKey(HASH)) {
            List<String> hashes = (List<String>) featureMap.get(HASH);
            for (String h : hashes) {
                HashEnum hashEnum = HashEnum.valueOf(h);
                int[] hashVals = null;
                if (hashEnum.equals(HashEnum.BIT_SAMPLING)) {
                    hashVals = BitSampling.generateHashes(lireFeature.getDoubleHistogram());
                } else if (hashEnum.equals(HashEnum.LSH)) {
                    hashVals = LocalitySensitiveHashing.generateHashes(lireFeature.getDoubleHistogram());
                }
                if (hashVals == null) {
                    // Defensive: skip unknown hash types instead of serializing null below.
                    continue;
                }
                String mapperName = featureEnum.name() + "." + HASH + "." + h;
                Mapper hashMapper = hashMappers.get(mapperName);
                context.externalValue(SerializationUtils.arrayToString(hashVals));
                hashMapper.parse(context);
            }
        }
    } catch (Exception e) {
        throw new ElasticsearchImageProcessException("Failed to index feature " + featureEnum.name(), e);
    }
}
// process metadata if required
if (!metadataMappers.isEmpty()) {
    try {
        Metadata metadata = ImageMetadataReader.readMetadata(new BufferedInputStream(new BytesStreamInput(content, false)), false);
        for (Directory directory : metadata.getDirectories()) {
            for (Tag tag : directory.getTags()) {
                // Mapper names are "<directory>.<tag>", lower-cased with whitespace
                // collapsed to underscores, e.g. "exif_ifd0.orientation".
                String metadataName = tag.getDirectoryName().toLowerCase().replaceAll("\\s+", "_") + "." +
                        tag.getTagName().toLowerCase().replaceAll("\\s+", "_");
                if (metadataMappers.containsKey(metadataName)) {