Package co.cask.cdap.common.io

Examples of co.cask.cdap.common.io.BinaryEncoder
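
The snippets on this page all follow the same pattern: wrap an OutputStream (typically a ByteArrayOutputStream) in a BinaryEncoder and call its write* methods, either directly or through a DatumWriter. A minimal sketch of direct usage, using only the writeInt/writeString/writeLong calls that appear in the examples below:

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import co.cask.cdap.common.io.BinaryEncoder;
import co.cask.cdap.common.io.Encoder;

public class BinaryEncoderSketch {
  public static byte[] encodeExample() throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Encoder encoder = new BinaryEncoder(bos);

    encoder.writeInt(42);
    // writeString returns the encoder, so calls can be chained (see the hash-key snippet below)
    encoder.writeString("hello").writeInt(7);
    encoder.writeLong(System.currentTimeMillis());

    return bos.toByteArray();
  }
}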


  @Override
  public byte[] encode(KeyIdentifier keyIdentifier) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Encoder encoder = new BinaryEncoder(bos);

    encoder.writeInt(KeyIdentifier.Schemas.getVersion());
    DatumWriter<KeyIdentifier> writer = writerFactory.create(KEY_IDENTIFIER_TYPE,
                                                             KeyIdentifier.Schemas.getCurrentSchema());
    writer.encode(keyIdentifier, encoder);
    return bos.toByteArray();
  }
View Full Code Here


  @Override
  public byte[] encode(AccessToken token) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Encoder encoder = new BinaryEncoder(bos);

    encoder.writeInt(AccessToken.Schemas.getVersion());
    DatumWriter<AccessToken> writer = writerFactory.create(ACCESS_TOKEN_TYPE, AccessToken.Schemas.getCurrentSchema());
    writer.encode(token, encoder);
    return bos.toByteArray();
  }
View Full Code Here

  @Override
  public byte[] encode(AccessTokenIdentifier identifier) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Encoder encoder = new BinaryEncoder(bos);

    encoder.writeInt(AccessTokenIdentifier.Schemas.getVersion());
    DatumWriter<AccessTokenIdentifier> writer = writerFactory.create(ACCESS_TOKEN_IDENTIFIER_TYPE,
                                                                     AccessTokenIdentifier.Schemas.getCurrentSchema());
    writer.encode(identifier, encoder);
    return bos.toByteArray();
  }
View Full Code Here
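
All three codecs above serialize the same way: a schema-version int written directly with the encoder, followed by the record body written through a DatumWriter for the current schema. Decoding such a payload starts by consuming the version prefix so the matching schema can be looked up before the rest is handed to a reader. A hedged sketch of that first step, assuming a companion co.cask.cdap.common.io.BinaryDecoder class with a readInt() method (not shown on this page):

import java.io.ByteArrayInputStream;
import java.io.IOException;

import co.cask.cdap.common.io.BinaryDecoder;
import co.cask.cdap.common.io.Decoder;

public class VersionPrefixSketch {
  // Reads the schema-version prefix written by the encode() methods above.
  // The remaining bytes would then go to a DatumReader created for the
  // schema that corresponds to this version.
  public static int readSchemaVersion(byte[] payload) throws IOException {
    ByteArrayInputStream bis = new ByteArrayInputStream(payload);
    Decoder decoder = new BinaryDecoder(bis); // assumed decoder counterpart
    return decoder.readInt();                 // matches encoder.writeInt(version)
  }
}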

    long timestamp = event.getTimestamp();

    // Make some assumptions about header size to minimize array copying:
    // 16-byte schema hash + body size + (number of headers) * (50 bytes per key/value pair) + 9 bytes timestamp (vlong encoding)
    ByteArrayOutputStream os = new ByteArrayOutputStream(16 + body.remaining() + headers.size() * 50 + 9);
    Encoder encoder = new BinaryEncoder(os);

    try {
      // Write the schema hash
      os.write(STREAM_EVENT_SCHEMA.getSchemaHash().toByteArray());

      StreamEventDataCodec.encode(event, encoder);
      encoder.writeLong(timestamp);
      return os.toByteArray();

    } catch (IOException e) {
      // This should never happen; if it does, something is very wrong.
      throw Throwables.propagate(e);
View Full Code Here

  private static Function<OutputStream, Encoder> createEncoderFactory() {
    return new Function<OutputStream, Encoder>() {
      @Override
      public Encoder apply(OutputStream input) {
        return new BinaryEncoder(input);
      }
    };
  }
View Full Code Here
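
The factory above simply adapts the BinaryEncoder constructor to Guava's Function interface, so callers can obtain encoders for arbitrary output streams without depending on the concrete class. A short usage sketch, written as a fragment like the other snippets here (variable names are illustrative, it assumes access to createEncoderFactory(), and imports are omitted as in the other fragments):

  Function<OutputStream, Encoder> encoderFactory = createEncoderFactory();

  ByteArrayOutputStream bos = new ByteArrayOutputStream();
  Encoder encoder = encoderFactory.apply(bos);
  encoder.writeString("payload");  // inside a method that declares throws IOException
  byte[] bytes = bos.toByteArray();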

  private byte[] encode(T object) {
    // encode T using schema
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    BinaryEncoder encoder = new BinaryEncoder(bos);
    try {
      this.datumWriter.encode(object, encoder);
    } catch (IOException e) {
      // SHOULD NEVER happen
      throw new DataSetException("Failed to encode object to be written: " + e.getMessage(), e);
View Full Code Here

  // Many entries will have no hash keys. Serialize that once and for all.
  private static byte[] serializeEmptyHashKeys() {
    try {
      // We don't synchronize here: the worst that can go wrong is a repeated assignment of the same value.
      ByteArrayOutputStream bos = new ByteArrayOutputStream();
      Encoder encoder = new BinaryEncoder(bos);
      encoder.writeInt(0);
      return bos.toByteArray();
    } catch (IOException e) {
      throw new RuntimeException("encoding empty hash keys went wrong - bailing out: " + e.getMessage(), e);
    }
  }
View Full Code Here

    // Many entries will have no hash keys. Reuse a static value for those.
    if (hashKeys == null || hashKeys.isEmpty()) {
      return SERIALIZED_EMPTY_HASH_KEYS;
    }
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Encoder encoder = new BinaryEncoder(bos);
    encoder.writeInt(hashKeys.size());
    for (Map.Entry<String, Integer> entry : hashKeys.entrySet()) {
      encoder.writeString(entry.getKey()).writeInt(entry.getValue());
    }
    encoder.writeInt(0); // per the Avro spec, end with a block count of zero
    return bos.toByteArray();
  }
View Full Code Here
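
The hash-key map above is written in Avro map style: a count, that many string/int pairs, then a terminating zero. A hedged sketch of the symmetric read path, again assuming a co.cask.cdap.common.io.BinaryDecoder companion with readInt()/readString(); the helper below is hypothetical, not part of the snippets on this page, and imports (ByteArrayInputStream, HashMap, the decoder classes) are omitted as in the other fragments:

  // Hypothetical decode counterpart for the hash-key serialization above.
  private Map<String, Integer> deserializeHashKeys(byte[] bytes) throws IOException {
    Decoder decoder = new BinaryDecoder(new ByteArrayInputStream(bytes));
    Map<String, Integer> hashKeys = new HashMap<>();
    int size = decoder.readInt();
    while (size > 0) {             // read blocks until the zero terminator
      for (int i = 0; i < size; i++) {
        hashKeys.put(decoder.readString(), decoder.readInt());
      }
      size = decoder.readInt();
    }
    return hashKeys;
  }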

  @Override
  public void emit(T data, Map<String, Object> partitions) {
    try {
      ByteArrayOutputStream output = new ByteArrayOutputStream();
      output.write(schemaHash);
      writer.encode(data, new BinaryEncoder(output));
      queueProducer.enqueue(new QueueEntry(Maps.transformValues(partitions, PARTITION_MAP_TRANSFORMER),
                                           output.toByteArray()));
    } catch (IOException e) {
      throw Throwables.propagate(e);
    }
View Full Code Here

    this.ack = ack;
    this.recordWriter = recordWriter;

    // The parent guarantees that the publish method is never called concurrently, hence it is safe to reuse the same instances.
    this.encoderOutputStream = new ByteArrayOutputStream(1024);
    this.encoder = new BinaryEncoder(encoderOutputStream);
  }
View Full Code Here
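
Because the output stream and encoder are reused across publish calls, each call would typically reset the stream before encoding into it. A sketch of what such a step could look like under that assumption; the method name is illustrative, and 'recordWriter' is assumed to expose the same encode(object, encoder) signature as the DatumWriter examples above:

  // Illustrative only: reuses the stream/encoder pair created in the constructor above.
  private byte[] encodeRecord(T record) throws IOException {
    encoderOutputStream.reset();            // drop data from the previous call
    recordWriter.encode(record, encoder);   // reuse the same BinaryEncoder
    return encoderOutputStream.toByteArray();
  }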
