Examples of BytesStreamOutput


Examples of org.elasticsearch.common.io.stream.BytesStreamOutput

        }
    }

    private BytesReference bytes() {
        assert values.size() > 0;
        BytesStreamOutput out = new BytesStreamOutput(estimateSize(values));
        try {
            encodeValues(out);
            out.close();
        } catch (IOException e) {
            // cannot happen: BytesStreamOutput writes to an in-memory buffer, not to I/O
        }
        return out.bytes();
    }
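
This excerpt buffers a set of encoded values in memory: the output is pre-sized with estimateSize(values), the values are written, and the accumulated bytes are returned as a BytesReference. A minimal sketch of the same pattern, with placeholder values in place of the original encodeValues logic:

    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.common.io.stream.BytesStreamOutput;

    import java.io.IOException;

    class ScratchEncoding {
        // Sketch only: write a few primitives into an in-memory buffer and expose them as a BytesReference.
        static BytesReference encode() throws IOException {
            BytesStreamOutput out = new BytesStreamOutput(32); // initial capacity hint; the buffer grows as needed
            out.writeVInt(3);
            out.writeLong(System.currentTimeMillis());
            out.writeString("example");
            out.close();        // harmless for the purely in-memory buffer
            return out.bytes(); // view over everything written so far
        }
    }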

Examples of org.elasticsearch.common.io.stream.BytesStreamOutput

                for (int c = 0; c < numColumns; c++) {
                    rows[r][c] = streamers[c].readValueFrom(in);
                }
            }
        } else {
            memoryStream = new BytesStreamOutput();
            Streams.copy(in, memoryStream);
        }
    }
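
Here the else branch keeps the incoming data in raw form instead of deserializing rows: the rest of the stream is drained into an in-memory BytesStreamOutput with Streams.copy so the bytes can be replayed later. A self-contained sketch of that buffering step, using a plain ByteArrayInputStream as a stand-in for the original input:

    import org.elasticsearch.common.io.Streams;
    import org.elasticsearch.common.io.stream.BytesStreamOutput;

    import java.io.ByteArrayInputStream;
    import java.io.IOException;

    class BufferRemainder {
        // Sketch only: drain an input stream into an in-memory buffer for later replay.
        static BytesStreamOutput buffer(byte[] raw) throws IOException {
            BytesStreamOutput memoryStream = new BytesStreamOutput();
            Streams.copy(new ByteArrayInputStream(raw), memoryStream);
            return memoryStream;
        }
    }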

Examples of org.elasticsearch.common.io.stream.BytesStreamOutput

                return null;
            }
            Map<String, Object> filteredMap = XContentMapValues.filter(value, includes, excludes);
            try {
                BytesReference bytes = new XContentBuilder(Requests.INDEX_CONTENT_TYPE.xContent(),
                        new BytesStreamOutput(lastSourceSize)).map(filteredMap).bytes();
                lastSourceSize = bytes.length();
                return bytes;
            } catch (IOException ex) {
                logger.error("could not parse xContent", ex);
            }
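
The filtered map is re-serialized through an XContentBuilder that writes straight into a BytesStreamOutput sized with lastSourceSize, the length of the previously produced source; reusing the last length as the initial capacity avoids most buffer resizing when consecutive documents are of similar size.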

Examples of org.elasticsearch.common.io.stream.BytesStreamOutput

        }
        return result;
    }

    public static BytesReference encodeSettings(Settings settings) throws IOException {
        BytesStreamOutput bso = new BytesStreamOutput();
        XContentBuilder builder = XContentFactory.jsonBuilder(bso);
        builder.startObject();
        for (Map.Entry<String, String> entry : settings.getAsMap().entrySet()) {
            builder.field(entry.getKey(), entry.getValue());
        }
        builder.endObject();
        builder.flush();
        return bso.bytes();
    }
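
encodeSettings renders a Settings instance as a flat JSON object: a jsonBuilder is opened directly on the BytesStreamOutput, each key/value pair is written as a field, and the builder is flushed before bso.bytes() is returned. The flush() call matters because the builder is never closed here; without it the generator could still be holding buffered output when the bytes are read.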

Examples of org.elasticsearch.common.io.stream.BytesStreamOutput

            }
        }

        searchHit.shardTarget(context.shardTarget());
        exportFields.hit(searchHit);
        BytesStreamOutput os = new BytesStreamOutput();
        XContentBuilder builder = new XContentBuilder(XContentFactory.xContent(XContentType.JSON), os);
        exportFields.toXContent(builder, ToXContent.EMPTY_PARAMS);
        builder.flush();
        BytesReference bytes = os.bytes();
        out.write(bytes.array(), bytes.arrayOffset(), bytes.length());
        out.write('\n');
        out.flush();
        numExported++;
    }
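
This exporter emits newline-delimited JSON: each hit is rendered through an XContentBuilder into an in-memory BytesStreamOutput, and the resulting BytesReference is copied to the destination stream via its backing array (bytes.array(), bytes.arrayOffset(), bytes.length()), followed by a line separator. Writing through the backing array assumes the reference returned by bytes() is backed by a single array, which is the case for the classic heap-buffered BytesStreamOutput.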

Examples of org.elasticsearch.common.io.stream.BytesStreamOutput

        return new MergeResult(mergeContext.buildConflicts());
    }

    public CompressedString refreshSource() throws ElasticsearchGenerationException {
        try {
            BytesStreamOutput bStream = new BytesStreamOutput();
            XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, CompressorFactory.defaultCompressor().streamOutput(bStream));
            builder.startObject();
            toXContent(builder, ToXContent.EMPTY_PARAMS);
            builder.endObject();
            builder.close();
            return mappingSource = new CompressedString(bStream.bytes());
        } catch (Exception e) {
            throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e);
        }
    }
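
refreshSource re-serializes the mapping and compresses it in a single pass: the JSON XContentBuilder writes into a compressing StreamOutput obtained from CompressorFactory.defaultCompressor().streamOutput(bStream), which in turn fills the in-memory buffer; closing the builder closes the compressor chain, so bStream.bytes() already holds the compressed bytes that back the CompressedString.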

Examples of org.elasticsearch.common.io.stream.BytesStreamOutput

    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
        if (!includeDefaults) {
            // simulate the generation to make sure we don't add unnecessary content if all is default
            // if all are defaults, there is no need to write anything at all - generating it twice is ok though
            BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(0);
            XContentBuilder b = new XContentBuilder(builder.contentType().xContent(), bytesStreamOutput);
            long pos = bytesStreamOutput.position();
            innerToXContent(b, false);
            b.flush();
            if (pos == bytesStreamOutput.position()) {
                return builder;
            }
        }
        builder.startObject(CONTENT_TYPE);
        innerToXContent(builder, includeDefaults);
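
The include_defaults handling uses a throwaway BytesStreamOutput(0) as a probe: the fragment is generated once into this scratch buffer, and if the buffer position has not advanced, every setting is at its default and the caller's builder is left untouched; otherwise the fragment is generated a second time into the real builder.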

Examples of org.elasticsearch.common.io.stream.BytesStreamOutput

        if (value == null) {
            return;
        }
        if (compress != null && compress && !CompressorFactory.isCompressed(value, 0, value.length)) {
            if (compressThreshold == -1 || value.length > compressThreshold) {
                BytesStreamOutput bStream = new BytesStreamOutput();
                StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream);
                stream.writeBytes(value, 0, value.length);
                stream.close();
                value = bStream.bytes().toBytes();
            }
        }
        if (fieldType().stored()) {
            fields.add(new Field(names.indexName(), value, fieldType));
        }
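
The stored value is compressed only when compression is enabled, the value is not already compressed, and it exceeds compressThreshold; the compressed bytes then replace the original value before the field is added. A self-contained sketch of the compression step, mirroring the excerpt above:

    import org.elasticsearch.common.compress.CompressorFactory;
    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamOutput;

    import java.io.IOException;

    class CompressToMemory {
        // Sketch only: compress a byte[] into an in-memory buffer and materialize the result.
        static byte[] compress(byte[] raw) throws IOException {
            BytesStreamOutput bStream = new BytesStreamOutput();
            StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream);
            stream.writeBytes(raw, 0, raw.length);
            stream.close();                   // closing flushes the compressor into bStream
            return bStream.bytes().toBytes(); // the compressed data as a byte[]
        }
    }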

Examples of org.elasticsearch.common.io.stream.BytesStreamOutput

        if (filtered) {
            // we don't update the context source if we filter, we want to keep it as is...

            Tuple<XContentType, Map<String, Object>> mapTuple = XContentHelper.convertToMap(source, true);
            Map<String, Object> filteredSource = XContentMapValues.filter(mapTuple.v2(), includes, excludes);
            BytesStreamOutput bStream = new BytesStreamOutput();
            StreamOutput streamOutput = bStream;
            if (compress != null && compress && (compressThreshold == -1 || source.length() > compressThreshold)) {
                streamOutput = CompressorFactory.defaultCompressor().streamOutput(bStream);
            }
            XContentType contentType = formatContentType;
            if (contentType == null) {
                contentType = mapTuple.v1();
            }
            XContentBuilder builder = XContentFactory.contentBuilder(contentType, streamOutput).map(filteredSource);
            builder.close();

            source = bStream.bytes();
        } else if (compress != null && compress && !CompressorFactory.isCompressed(source)) {
            if (compressThreshold == -1 || source.length() > compressThreshold) {
                BytesStreamOutput bStream = new BytesStreamOutput();
                XContentType contentType = XContentFactory.xContentType(source);
                if (formatContentType != null && formatContentType != contentType) {
                    XContentBuilder builder = XContentFactory.contentBuilder(formatContentType, CompressorFactory.defaultCompressor().streamOutput(bStream));
                    builder.copyCurrentStructure(XContentFactory.xContent(contentType).createParser(source));
                    builder.close();
                } else {
                    StreamOutput streamOutput = CompressorFactory.defaultCompressor().streamOutput(bStream);
                    source.writeTo(streamOutput);
                    streamOutput.close();
                }
                source = bStream.bytes();
                // update the data in the context, so it can be compressed and stored compressed outside...
                context.source(source);
            }
        } else if (formatContentType != null) {
            // see if we need to convert the content type
            Compressor compressor = CompressorFactory.compressor(source);
            if (compressor != null) {
                CompressedStreamInput compressedStreamInput = compressor.streamInput(source.streamInput());
                XContentType contentType = XContentFactory.xContentType(compressedStreamInput);
                compressedStreamInput.resetToBufferStart();
                if (contentType != formatContentType) {
                    // we need to reread and store back, compressed....
                    BytesStreamOutput bStream = new BytesStreamOutput();
                    StreamOutput streamOutput = CompressorFactory.defaultCompressor().streamOutput(bStream);
                    XContentBuilder builder = XContentFactory.contentBuilder(formatContentType, streamOutput);
                    builder.copyCurrentStructure(XContentFactory.xContent(contentType).createParser(compressedStreamInput));
                    builder.close();
                    source = bStream.bytes();
                    // update the data in the context, so we store it in the translog in this format
                    context.source(source);
                } else {
                    compressedStreamInput.close();
                }
            } else {
                XContentType contentType = XContentFactory.xContentType(source);
                if (contentType != formatContentType) {
                    // we need to reread and store back in the configured content type
                    BytesStreamOutput bStream = new BytesStreamOutput();
                    XContentBuilder builder = XContentFactory.contentBuilder(formatContentType, bStream);
                    builder.copyCurrentStructure(XContentFactory.xContent(contentType).createParser(source));
                    builder.close();
                    source = bStream.bytes();
                    // update the data in the context, so we store it in the translog in this format
                    context.source(source);
                }
            }
        }
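
This source-field excerpt covers three cases. When include/exclude filtering is applied, the source is parsed into a map, filtered, and re-serialized (optionally through a compressing stream) into a fresh BytesStreamOutput, and the context source is deliberately left unchanged. When compression is requested on a still-uncompressed source above the threshold, the source is either transcoded into formatContentType while being compressed or copied verbatim through the compressor, and the context is updated with the compressed bytes. Finally, when only the content type differs from formatContentType, the source is re-read (decompressing first if necessary) and rewritten in the target format, again updating the context so the translog stores the converted form.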

Examples of org.xbib.elasticsearch.common.io.stream.BytesStreamOutput

        }

        byte status = 0;
        status = TransportStatus.setRequest(status);

        BytesStreamOutput bStream = new BytesStreamOutput();
        bStream.skip(NettyHeader.HEADER_SIZE);
        StreamOutput stream = bStream;
        stream = new HandlesStreamOutput(stream);

        // we pick the smallest of the 2, to support both backward and forward compatibility
        // note, this is the only place we need to do this, since from here on, we use the serialized version
        // as the version to use also when the node receiving this request will send the response with
        Version version = Version.smallest(this.version, node.version());

        stream.setVersion(version);
        stream.writeString(action);

        ChannelBuffer buffer;
        request.writeTo(stream);
        stream.close();
        buffer = bStream.ourBytes().toChannelBuffer();
        NettyHeader.writeHeader(buffer, requestId, status, version);
        targetChannel.write(buffer);
    }
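
This excerpt is from the Netty transport of the org.xbib fork: the output first skips NettyHeader.HEADER_SIZE bytes to reserve room for the frame header, the request is serialized through a HandlesStreamOutput wrapper using the lower of the local and remote node versions, and only once the body has been written (and its length is known) is the header filled into the reserved space before the buffer is sent over the channel.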