Package org.apache.hadoop.io

Examples of org.apache.hadoop.io.DataInputBuffer
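
org.apache.hadoop.io.DataInputBuffer is a reusable DataInput implementation that reads from an in-memory buffer: reset(data, offset, length) points it at a byte array, after which readFields() and the other DataInput methods consume those bytes without allocating a new DataInputStream and ByteArrayInputStream per read. It is the counterpart of DataOutputBuffer, and the two are routinely paired to round-trip serialized Writables, as most of the excerpts below do. A minimal sketch of that round trip (the Text value is purely illustrative):

    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.Text;

    // Serialize a Writable into an in-memory output buffer...
    DataOutputBuffer out = new DataOutputBuffer();
    new Text("hello").write(out);

    // ...then point a reusable DataInputBuffer at the same bytes and read back.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());
    Text copy = new Text();
    copy.readFields(in);   // copy now holds "hello"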


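Deserializing an InputSplit: a ClassNotFoundException for the split class is rewrapped as an IOException; otherwise the raw split bytes are wrapped in a DataInputBuffer and passed to the split's readFields():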
      IOException wrap = new IOException("Split class " + splitClass +
                                         " not found");
      wrap.initCause(exp);
      throw wrap;
    }
    DataInputBuffer splitBuffer = new DataInputBuffer();
    splitBuffer.reset(split.get(), 0, split.getSize());
    instantiatedSplit.readFields(splitBuffer);
   
    // if it is a file split, we can give more details
    if (instantiatedSplit instanceof FileSplit) {
      FileSplit fileSplit = (FileSplit) instantiatedSplit;


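Copying records from a sorted-record iterator to a writer. Both buffers are allocated once, outside the loop, and only reset() for each record, which is the main advantage of DataInputBuffer over ByteArrayInputStream. Note the two-argument reset(data, length), shorthand for reset(data, 0, length):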
        value = (Writable)ReflectionUtils.newInstance(valClass, job);
      } catch (Exception e) {
        throw new RuntimeException(e);
      }

      DataInputBuffer keyIn = new DataInputBuffer();
      DataInputBuffer valIn = new DataInputBuffer();
      DataOutputBuffer valOut = new DataOutputBuffer();
      while (resultIter.next()) {
        keyIn.reset(resultIter.getKey().getData(),
                    resultIter.getKey().getLength());
        key.readFields(keyIn);
        valOut.reset();
        (resultIter.getValue()).writeUncompressedBytes(valOut);
        valIn.reset(valOut.getData(), valOut.getLength());
        value.readFields(valIn);

        writer.append(key, value);
      }
    }

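Round-tripping an array of HBase Result objects in a test: Result.writeArray() serializes the array through a DataOutputStream, and Result.readArray() reads it back from a DataInputBuffer reset over the resulting bytes: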
    DataOutputStream out = new DataOutputStream(byteStream);
    Result.writeArray(out, results);

    byte [] rb = byteStream.toByteArray();

    DataInputBuffer in = new DataInputBuffer();
    in.reset(rb, 0, rb.length);

    Result [] deResults = Result.readArray(in);

    assertTrue(results.length == deResults.length);

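The same round trip for the empty cases: first a write that the test expects to deserialize to zero results, then an explicitly empty Result[0]: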
    out = new DataOutputStream(byteStream);
    Result.writeArray(out, results);

    byte [] rb = byteStream.toByteArray();

    DataInputBuffer in = new DataInputBuffer();
    in.reset(rb, 0, rb.length);

    Result [] deResults = Result.readArray(in);

    assertTrue(deResults.length == 0);

    results = new Result[0];

    byteStream = new ByteArrayOutputStream();
    out = new DataOutputStream(byteStream);
    Result.writeArray(out, results);

    rb = byteStream.toByteArray();

    in = new DataInputBuffer();
    in.reset(rb, 0, rb.length);

    deResults = Result.readArray(in);

    assertTrue(deResults.length == 0);

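Exercising a compression codec end to end: random key/value records are serialized into a DataOutputBuffer, deflated through the codec's CompressionOutputStream, and then inflated again by layering the codec's CompressionInputStream over a DataInputBuffer pointed at the compressed bytes: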
      RandomDatum value = generator.getValue();
     
      key.write(data);
      value.write(data);
    }
    DataInputBuffer originalData = new DataInputBuffer();
    DataInputStream originalIn = new DataInputStream(new BufferedInputStream(originalData));
    originalData.reset(data.getData(), 0, data.getLength());
   
    LOG.info("Generated " + count + " records");
   
    // Compress data
    DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
    CompressionOutputStream deflateFilter =
      codec.createOutputStream(compressedDataBuffer);
    DataOutputStream deflateOut =
      new DataOutputStream(new BufferedOutputStream(deflateFilter));
    deflateOut.write(data.getData(), 0, data.getLength());
    deflateOut.flush();
    deflateFilter.finish();
    LOG.info("Finished compressing data");
   
    // De-compress data
    DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
    deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
                                 compressedDataBuffer.getLength());
    CompressionInputStream inflateFilter =
      codec.createInputStream(deCompressedDataBuffer);
    DataInputStream inflateIn =
      new DataInputStream(new BufferedInputStream(inflateFilter));
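
For reference, a condensed, self-contained sketch of the same compress/decompress round trip; GzipCodec and the sample payload are assumptions made for illustration, and any CompressionCodec works the same way:

    import java.io.DataInputStream;
    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.CompressionOutputStream;
    import org.apache.hadoop.io.compress.GzipCodec;
    import org.apache.hadoop.util.ReflectionUtils;

    Configuration conf = new Configuration();
    CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);

    byte[] original = "some payload".getBytes(StandardCharsets.UTF_8);  // illustrative data

    // Compress into an in-memory buffer.
    DataOutputBuffer compressed = new DataOutputBuffer();
    CompressionOutputStream deflater = codec.createOutputStream(compressed);
    deflater.write(original, 0, original.length);
    deflater.finish();

    // Decompress by pointing a DataInputBuffer at the compressed bytes.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(compressed.getData(), 0, compressed.getLength());
    DataInputStream inflater = new DataInputStream(codec.createInputStream(in));
    byte[] roundTripped = new byte[original.length];
    inflater.readFully(roundTripped);   // roundTripped should now equal original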

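Recovering scan metrics in an HBase test: the serialized ScanMetrics blob is read from the scan's attributes and decoded through a DataInputBuffer: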
  private ScanMetrics getScanMetrics(Scan scan) throws Exception {
    byte[] serializedMetrics = scan.getAttribute(Scan.SCAN_ATTRIBUTES_METRICS_DATA);
    assertTrue("Serialized metrics were not found.", serializedMetrics != null);

    DataInputBuffer in = new DataInputBuffer();
    in.reset(serializedMetrics, 0, serializedMetrics.length);
    ScanMetrics scanMetrics = new ScanMetrics();
    scanMetrics.readFields(in);
    return scanMetrics;
  }

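Emitting a value's uncompressed bytes when the stored data is compressed: a DataInputBuffer over the raw bytes feeds a codec decompression stream; both are created lazily on first use and only reset, not reallocated, on later calls: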
    }
   
    public void writeUncompressedBytes(DataOutputStream outStream)
      throws IOException {
      if (decompressedStream == null) {
        rawData = new DataInputBuffer();
        decompressedStream = codec.createInputStream(rawData);
      } else {
        decompressedStream.resetState();
      }
      rawData.reset(data, 0, dataSize);

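Initializing a reader's value stream (and, for block-compressed files, the key-length, key, and value-length streams): each stream is backed by a DataInputBuffer, wrapped in a codec decompression stream with a decompressor borrowed from CodecPool when the data is compressed: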
        in.readFully(sync);                       // read sync bytes
      }
     
      // Initialize... *not* if we are constructing a temporary Reader
      if (!tempReader) {
        valBuffer = new DataInputBuffer();
        if (decompress) {
          valDecompressor = CodecPool.getDecompressor(codec);
          valInFilter = codec.createInputStream(valBuffer, valDecompressor);
          valIn = new DataInputStream(valInFilter);
        } else {
          valIn = valBuffer;
        }

        if (blockCompressed) {
          keyLenBuffer = new DataInputBuffer();
          keyBuffer = new DataInputBuffer();
          valLenBuffer = new DataInputBuffer();

          keyLenDecompressor = CodecPool.getDecompressor(codec);
          keyLenInFilter = codec.createInputStream(keyLenBuffer,
                                                   keyLenDecompressor);
          keyLenIn = new DataInputStream(keyLenInFilter);

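A Chunk serialization test: the chunk is written into a DataOutputBuffer, a DataInputBuffer is reset over those bytes, and the leading int is asserted to be the serialization protocol version: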
    cb.addRecord("baz".getBytes());
    Chunk c = cb.getChunk();
    DataOutputBuffer ob = new DataOutputBuffer(c.getSerializedSizeEstimate());
    try {
      c.write(ob);
      DataInputBuffer ib = new DataInputBuffer();
      ib.reset(ob.getData(), c.getSerializedSizeEstimate());
      int version = ib.readInt();
      assertEquals(version, ChunkImpl.PROTOCOL_VERSION);
    } catch (IOException e) {
      e.printStackTrace();
      fail("Should nor raise any exception");
    }

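The companion negative test: after bumping ChunkImpl.PROTOCOL_VERSION, re-reading the serialized chunk is expected to fail with an IOException: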
    cb.addRecord("baz".getBytes());
    Chunk c = cb.getChunk();
    DataOutputBuffer ob = new DataOutputBuffer(c.getSerializedSizeEstimate());
    try {
      c.write(ob);
      DataInputBuffer ib = new DataInputBuffer();
      ib.reset(ob.getData(), c.getSerializedSizeEstimate());
      // change current chunkImpl version
      ChunkImpl.PROTOCOL_VERSION = ChunkImpl.PROTOCOL_VERSION + 1;
      ChunkImpl.read(ib);
      fail("Should have raised an IOexception");
    } catch (IOException e) {
