Package org.apache.hadoop.io

Examples of org.apache.hadoop.io.DataOutputBuffer$Buffer
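
All of the snippets below follow the same basic pattern: serialize Writable data into an in-memory DataOutputBuffer, then read the bytes back through a DataInputBuffer or wrap them in a ByteBuffer. DataOutputBuffer$Buffer itself is the private ByteArrayOutputStream subclass that backs DataOutputBuffer, so client code only ever touches the outer class. One detail shared by every example: getData() returns the whole backing array, which may be longer than the written data, so it must always be paired with getLength(). A minimal, self-contained sketch of the round trip (the class name RoundTrip is ours; Text stands in for any Writable):

    import java.io.IOException;
    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.Text;

    public class RoundTrip {
        public static void main(String[] args) throws IOException {
            Text original = new Text("hello");

            // Serialize into the growable in-memory buffer.
            // reset() truncates the buffer so it can be reused across records.
            DataOutputBuffer out = new DataOutputBuffer();
            out.reset();
            original.write(out);

            // getData() exposes the backing array; pair it with getLength()
            // so any slack beyond the valid region is ignored.
            DataInputBuffer in = new DataInputBuffer();
            in.reset(out.getData(), out.getLength());

            Text copy = new Text();
            copy.readFields(in);
            System.out.println(copy); // prints "hello"
        }
    }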


        assertThat("パーティションが細かい", sequencialReadAve, greaterThan(500.0));
        assertThat("パーティションが粗い", sequencialReadAve, lessThan(10000.0));
    }

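    // Serialize a Writable into a right-sized byte array; IOException is
    // rethrown as AssertionError since this is test code.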
    static byte[] write(Writable writable) {
        DataOutputBuffer buffer = new DataOutputBuffer();
        buffer.reset();
        try {
            writable.write(buffer);
        } catch (IOException e) {
            throw new AssertionError(e);
        }
        return Arrays.copyOf(buffer.getData(), buffer.getLength());
    }

        Class<?> type = loader.modelType("Simple");
        assertThat(type.isAnnotationPresent(ModelInputLocation.class), is(true));
        assertThat(type.isAnnotationPresent(ModelOutputLocation.class), is(true));

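        // Instantiate the generated TSV ModelOutput named by the
        // @ModelOutputLocation annotation and emit three records.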
        ModelWrapper object = loader.newModel("Simple");
        DataOutputBuffer output = new DataOutputBuffer();
        ModelOutput<Object> modelOut = (ModelOutput<Object>) type.getAnnotation(ModelOutputLocation.class)
            .value()
            .getDeclaredConstructor(RecordEmitter.class)
            .newInstance(new TsvEmitter(new OutputStreamWriter(output, "UTF-8")));

        object.set("sid", 1L);
        object.set("value", new Text("hello"));
        modelOut.write(object.unwrap());

        object.set("sid", 2L);
        object.set("value", new Text("world"));
        modelOut.write(object.unwrap());

        object.set("sid", 3L);
        object.set("value", null);
        modelOut.write(object.unwrap());
        modelOut.close();

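        // Read the emitted bytes back through the generated TSV ModelInput.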
        DataInputBuffer input = new DataInputBuffer();
        input.reset(output.getData(), output.getLength());
        ModelInput<Object> modelIn = (ModelInput<Object>) type.getAnnotation(ModelInputLocation.class)
            .value()
            .getDeclaredConstructor(RecordParser.class)
            .newInstance(new TsvParser(new InputStreamReader(input, "UTF-8")));
        ModelWrapper copy = loader.newModel("Simple");

        object.set("type_decimal", new BigDecimal("1234.567"));
        object.set("type_text", new Text("Hello, world!"));
        object.set("type_date", new Date(2011, 3, 31));
        object.set("type_datetime", new DateTime(2011, 3, 31, 23, 30, 1));

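        // Emit the same record three times, then parse the bytes back into a fresh model below.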
        DataOutputBuffer output = new DataOutputBuffer();
        ModelOutput<Object> modelOut = (ModelOutput<Object>) type.getAnnotation(ModelOutputLocation.class)
            .value()
            .getDeclaredConstructor(RecordEmitter.class)
            .newInstance(new TsvEmitter(new OutputStreamWriter(output, "UTF-8")));
        modelOut.write(object.unwrap());
        modelOut.write(object.unwrap());
        modelOut.write(object.unwrap());
        modelOut.close();

        DataInputBuffer input = new DataInputBuffer();
        input.reset(output.getData(), output.getLength());
        ModelInput<Object> modelIn = (ModelInput<Object>) type.getAnnotation(ModelInputLocation.class)
            .value()
            .getDeclaredConstructor(RecordParser.class)
            .newInstance(new TsvParser(new InputStreamReader(input, "UTF-8")));
        ModelWrapper copy = loader.newModel("Primitives");

     */
    private void writeHeader() throws IOException {
      out.write(HBaseServer.HEADER.array());
      out.write(HBaseServer.CURRENT_VERSION);
      // When there are more fields, we can introduce a ConnectionHeader Writable.
      DataOutputBuffer buf = new DataOutputBuffer();
      header.write(buf);

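      // Length-prefix the serialized header so the receiver can frame it.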
      int bufLen = buf.getLength();
      out.writeInt(bufLen);
      out.write(buf.getData(), 0, bufLen);
    }

        return;
      }

      // For serializing the data to be written.

      final DataOutputBuffer d = new DataOutputBuffer();
      try {
        if (LOG.isDebugEnabled())
          LOG.debug(getName() + " sending #" + call.id);

        d.writeInt(0xdeadbeef); // placeholder for data length
        d.writeInt(call.id);
        call.param.write(d);
        byte[] data = d.getData();
        int dataLength = d.getLength();
        // fill in the placeholder
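        // (dataLength - 4 excludes the 4-byte length field itself)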
        Bytes.putInt(data, 0, dataLength - 4);
        //noinspection SynchronizeOnNonFinalField
        synchronized (this.out) { // FindBugs IS2_INCONSISTENT_SYNC
          out.write(data, 0, dataLength);

      // LocalStorageToken is needed irrespective of whether security is enabled
      // or not.
      TokenCache.setJobToken(jobToken, taskCredentials);

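      // Serialize all task credentials into an in-memory buffer and wrap
      // only the valid region as a ByteBuffer.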
      DataOutputBuffer containerTokens_dob = new DataOutputBuffer();
      LOG.info("Size of containertokens_dob is "
          + taskCredentials.numberOfTokens());
      taskCredentials.writeTokenStorageToStream(containerTokens_dob);
      taskCredentialsBuffer =
          ByteBuffer.wrap(containerTokens_dob.getData(), 0,
              containerTokens_dob.getLength());

      // Add shuffle secret key
      // The secret key is converted to a JobToken to preserve backwards
      // compatibility with an older ShuffleHandler running on an NM.
      LOG.info("Putting shuffle token in serviceData");

    // assert delegation tokens exist in rm1 DelegationTokenRenewr
    Assert.assertEquals(tokenSet, rm1.getRMContext()
      .getDelegationTokenRenewer().getDelegationTokens());

    // assert delegation tokens are saved
    DataOutputBuffer dob = new DataOutputBuffer();
    ts.writeTokenStorageToStream(dob);
    ByteBuffer securityTokens =
        ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    securityTokens.rewind();
    Assert.assertEquals(securityTokens, appState
      .getApplicationSubmissionContext().getAMContainerSpec()
      .getTokens());

    runTestCycle(kvs);
  }

  private void runTestCycle(List<KeyValue> kvs) throws Exception {
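    // Compress every KeyValue into one buffer, then reset the dictionary,
    // decompress, and check each value for equality.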
    CompressionContext ctx = new CompressionContext(LRUDictionary.class);
    DataOutputBuffer buf = new DataOutputBuffer(BUF_SIZE);
    for (KeyValue kv : kvs) {
      KeyValueCompression.writeKV(buf, kv, ctx);
    }

    ctx.clear();
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(
        buf.getData(), 0, buf.getLength()));
    for (KeyValue kv : kvs) {
      KeyValue readBack = KeyValueCompression.readKV(in, ctx);
      assertEquals(kv, readBack);
    }
  }

    table.close();
  }

  @Test
  public void testExecDeserialization() throws IOException {
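    // Hand-assemble the wire form of an Exec call: method name, parameter
    // count, a serialized Scan and its class name, a byte array, and the
    // dynamic protocol name.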
    DataOutputBuffer dob = new DataOutputBuffer();
    dob.writeUTF(methodName);
    dob.writeInt(1);
    Scan scan = new Scan();
    HbaseObjectWritable.writeObject(dob, scan, Scan.class, new Configuration());
    dob.writeUTF("org.apache.hadoop.hbase.client.Scan");
    Bytes.writeByteArray(dob, new byte[]{'a'});
    // this is the dynamic protocol name
    dob.writeUTF(protocolName);

    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(dob.getData(), dob.getLength());

    Exec after = new Exec();
    after.setConf(HBaseConfiguration.create());
    after.readFields(dib);
    // no error thrown
