Package org.elasticsearch.common.io.stream

Examples of org.elasticsearch.common.io.stream.BytesStreamInput
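BytesStreamInput is the StreamInput implementation backed by an in-memory byte[] or BytesReference, used throughout older (pre-2.x) Elasticsearch code wherever serialized bytes need to be read back: translog recovery, transport message handling, _source rendering, plugins, and tests. The excerpts below show typical usages.

As a warm-up, here is a minimal round-trip sketch. It uses only calls that appear in the excerpts on this page; the exact constructor overloads vary across the Elasticsearch versions these examples come from.

    // Write a couple of primitives into an in-memory buffer ...
    BytesStreamOutput out = new BytesStreamOutput();
    out.writeInt(42);
    out.writeLong(System.currentTimeMillis());
    out.close();

    // ... and read them back in the same order through a BytesStreamInput.
    BytesStreamInput in = new BytesStreamInput(out.bytes());
    int answer = in.readInt();      // 42
    long timestamp = in.readLong();
    in.close();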

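Reading a translog entry: readSource wraps the raw bytes, skips the size header, reads the operation type id, and instantiates the matching Translog.Operation, which then deserializes itself from the stream: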

        // (tail of the preceding method: the operation deserializes itself from the stream)
        operation.readFrom(in);
        return operation;
    }

    public static BytesHolder readSource(byte[] data) throws IOException {
        BytesStreamInput in = new BytesStreamInput(data);
        in.readInt(); // skip the size header
        Translog.Operation.Type type = Translog.Operation.Type.fromId(in.readByte());
        Translog.Operation operation;
        switch (type) {
            case CREATE:
                operation = new Translog.Create();
                break;
            // ... (remaining operation types elided)
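Rendering a document's _source on the REST layer: when the stored bytes are LZF-compressed, they are wrapped in a BytesStreamInput and decompressed through a cached LZF stream. If the detected content type matches the response builder's, the bytes are streamed through as a raw field: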


        restDocumentSource(source, 0, source.length, builder, params);
    }

    public static void restDocumentSource(byte[] source, int offset, int length, XContentBuilder builder, ToXContent.Params params) throws IOException {
        if (LZF.isCompressed(source, offset, length)) {
            // Decompress through a cached LZF stream, then sniff the content type.
            BytesStreamInput siBytes = new BytesStreamInput(source, offset, length);
            LZFStreamInput siLzf = CachedStreamInput.cachedLzf(siBytes);
            XContentType contentType = XContentFactory.xContentType(siLzf);
            siLzf.resetToBufferStart();
            if (contentType == builder.contentType()) {
                // Content type matches the response: stream the bytes through untouched.
                builder.rawField("_source", siLzf);
                // ... (remainder elided)
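Dispatching a message in the local transport: the received byte[] is wrapped in a BytesStreamInput (with cached handles layered on top), then the request id and status byte are read to decide between request and response handling: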

    ThreadPool threadPool() {
        return this.threadPool;
    }

    void messageReceived(byte[] data, String action, LocalTransport sourceTransport, @Nullable final Long sendRequestId) {
        // Wrap the raw message bytes; cachedHandles layers handle-sharing support on top.
        StreamInput stream = new BytesStreamInput(data);
        stream = CachedStreamInput.cachedHandles(stream);

        try {
            long requestId = stream.readLong();
            byte status = stream.readByte();
            boolean isRequest = TransportStreams.statusIsRequest(status);

            if (isRequest) {
                handleRequest(stream, requestId, sourceTransport);
            } else {
                // ... (response handling elided)
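Iterating over a translog file: each operation's opSize bytes are read from the file channel into a reusable buffer, then deserialized through a BytesStreamInput over the buffer's backing array: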

            // Read the next operation's opSize bytes from the channel at the current position.
            cacheBuffer.clear();
            cacheBuffer.limit(opSize);
            channel.read(cacheBuffer, position);
            cacheBuffer.flip();
            position += opSize;
            lastOperationRead = TranslogStreams.readTranslogOperation(new BytesStreamInput(cacheBuffer.array(), 0, opSize));
            return true;
        } catch (Exception e) {
            // Any failure is treated as "no more operations" rather than propagated.
            return false;
        }
    }
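Extracting text with Tika in the attachment mapper: the content byte[] is handed to Tika as a BytesStreamInput, with indexedChars capping the length of the extracted text: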


        String parsedContent;
        try {
            // indexedChars caps the length of the string returned by parseToString; -1 means no limit
            parsedContent = tika().parseToString(new BytesStreamInput(content, false), metadata, indexedChars);
        } catch (Throwable e) {
            // #18: optionally ignore errors when Tika cannot parse the data
            if (!ignoreErrors) {
                throw new MapperParsingException("Failed to extract [" + indexedChars + "] characters of text for [" + name + "]", e);
            } else {
                // ... (error ignored; remainder elided)
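From BytesArray: streamInput() wraps the backing byte[] at the array's offset, so no copy is made: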

        return new BytesArray(bytes, offset + from, length);
    }

    @Override
    public StreamInput streamInput() {
        // Wrap the backing array directly; no copy of the bytes is made.
        return new BytesStreamInput(bytes, offset, length, false);
    }
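A serialization round-trip in a facet test: one facet instance is written to a BytesStreamOutput, read back into another instance through a BytesStreamInput over the resulting bytes, and the two are compared: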

    private <T extends InternalFacet> void serializeAndDeserialize(final T toSend, final T toReceive) throws Exception {
        // Serialize one facet instance into an in-memory buffer ...
        final BytesStreamOutput bso = new BytesStreamOutput();
        toSend.writeTo(bso);
        bso.close();
        // ... then deserialize the bytes into the other instance and compare.
        final BytesReference bytes = bso.bytes();
        final BytesStreamInput bsi = new BytesStreamInput(bytes);
        toReceive.readFrom(bsi);
        bsi.close();
        assertEquals(toSend.getName(), toReceive.getName());
        assertEquals(toSend.getType(), toReceive.getType());
    }
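Extracting a single LIRE feature in the image plugin: the image bytes are decoded with ImageIO straight from a BytesStreamInput, downscaled if too large, then fed to the feature extractor: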

        LireFeature feature = null;

        if (image != null) {
            try {
                feature = featureEnum.getFeatureClass().newInstance();
                // Decode the image bytes, downscaling when either dimension exceeds the limit.
                BufferedImage img = ImageIO.read(new BytesStreamInput(image, false));
                if (Math.max(img.getHeight(), img.getWidth()) > ImageMapper.MAX_IMAGE_DIMENSION) {
                    img = ImageUtils.scaleImage(img, ImageMapper.MAX_IMAGE_DIMENSION);
                }
                feature.extract(img);
            } catch (Exception e) {
                // ... (error handling elided)
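A fuller excerpt from the image mapper's parse path: the image is decoded once, each configured feature is extracted (in parallel on the generic thread pool when there are several), hashes are generated where requested, and finally image metadata is read through another BytesStreamInput: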


        final Boolean useThreadPool = settings.getAsBoolean("index.image.use_thread_pool", true);
        final Boolean ignoreMetadataError = settings.getAsBoolean("index.image.ignore_metadata_error", true);

        BufferedImage img = ImageIO.read(new BytesStreamInput(content, false));
        if (img == null) {
            // ImageIO.read returns null when no registered reader can decode the bytes
            throw new ElasticsearchImageProcessException("Failed to decode image");
        }
        if (Math.max(img.getHeight(), img.getWidth()) > MAX_IMAGE_DIMENSION) {
            img = ImageUtils.scaleImage(img, MAX_IMAGE_DIMENSION);
        }
        final BufferedImage finalImg = img;

        final Map<FeatureEnum, LireFeature> featureExtractMap = new MapMaker().makeMap();

        // When multiple features are configured, extract them concurrently on the generic thread pool.
        if (useThreadPool && features.size() > 1) {
            final CountDownLatch latch = new CountDownLatch(features.size());
            Executor executor = threadPool.generic();

            for (ObjectObjectCursor<FeatureEnum, Map<String, Object>> cursor : features) {
                final FeatureEnum featureEnum = cursor.key;
                executor.execute(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            LireFeature lireFeature = featureEnum.getFeatureClass().newInstance();
                            lireFeature.extract(finalImg);
                            featureExtractMap.put(featureEnum, lireFeature);
                        } catch (Throwable e) {
                            logger.error("Failed to extract feature from image", e);
                        } finally {
                            latch.countDown();
                        }
                    }
                });
            }
            try {
                latch.await();
            } catch (InterruptedException e) {
                logger.debug("Interrupted while extracting features from image", e);
                Thread.currentThread().interrupt();
            }
        }

        // Index every configured feature, reusing the results computed concurrently above.
        for (ObjectObjectCursor<FeatureEnum, Map<String, Object>> cursor : features) {
            FeatureEnum featureEnum = cursor.key;
            Map<String, Object> featureMap = cursor.value;

            try {
                LireFeature lireFeature;
                if (featureExtractMap.containsKey(featureEnum)) {   // already processed
                    lireFeature = featureExtractMap.get(featureEnum);
                } else {
                    lireFeature = featureEnum.getFeatureClass().newInstance();
                    lireFeature.extract(img);
                }
                byte[] parsedContent = lireFeature.getByteArrayRepresentation();

                Mapper featureMapper = featureMappers.get(featureEnum.name());
                context.externalValue(parsedContent);
                featureMapper.parse(context);
                context.doc().add(new BinaryDocValuesField(name() + "." + featureEnum.name(), new BytesRef(parsedContent)));

                // add hash if required
                if (featureMap.containsKey(HASH)) {
                    List<String> hashes = (List<String>) featureMap.get(HASH);
                    for (String h : hashes) {
                        HashEnum hashEnum = HashEnum.valueOf(h);
                        int[] hashVals = null;
                        if (hashEnum.equals(HashEnum.BIT_SAMPLING)) {
                            hashVals = BitSampling.generateHashes(lireFeature.getDoubleHistogram());
                        } else if (hashEnum.equals(HashEnum.LSH)) {
                            hashVals = LocalitySensitiveHashing.generateHashes(lireFeature.getDoubleHistogram());
                        }

                        String mapperName = featureEnum.name() + "." + HASH + "." + h;
                        Mapper hashMapper = hashMappers.get(mapperName);
                        context.externalValue(SerializationUtils.arrayToString(hashVals));
                        hashMapper.parse(context);
                    }
                }
            } catch (Exception e) {
                throw new ElasticsearchImageProcessException("Failed to index feature " + featureEnum.name(), e);
            }
        }

        // process metadata if required
        if (!metadataMappers.isEmpty()) {
            try {
                Metadata metadata = ImageMetadataReader.readMetadata(new BufferedInputStream(new BytesStreamInput(content, false)), false);
                for (Directory directory : metadata.getDirectories()) {
                    for (Tag tag : directory.getTags()) {
                        String metadataName = tag.getDirectoryName().toLowerCase().replaceAll("\\s+", "_") + "." +
                                tag.getTagName().toLowerCase().replaceAll("\\s+", "_");
                        if (metadataMappers.containsKey(metadataName)) {
                            // ... (metadata field mapping elided)
