Package org.apache.hadoop.io.compress

Examples of org.apache.hadoop.io.compress.CompressionOutputStream
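
The excerpts below are pulled from real projects and are truncated at the "View Full Code Here" markers. As a baseline before reading them, here is a minimal, self-contained round-trip sketch (illustrative code, not taken from any of the excerpts; GzipCodec is used only as a convenient concrete codec): the codec is created through ReflectionUtils so its Configuration gets set, finish() flushes the compressed trailer without closing the underlying stream, and close() releases it.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class CompressionRoundTrip {

  public static byte[] compress(byte[] plain, Configuration conf) throws IOException {
    // Instantiate the codec through ReflectionUtils so its Configuration is set.
    CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);
    ByteArrayOutputStream compressed = new ByteArrayOutputStream();
    CompressionOutputStream cos = codec.createOutputStream(compressed);
    try {
      cos.write(plain, 0, plain.length);
      cos.finish();   // write the codec trailer; the underlying stream stays open
    } finally {
      cos.close();
    }
    return compressed.toByteArray();
  }

  public static byte[] decompress(byte[] compressed, Configuration conf) throws IOException {
    CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);
    CompressionInputStream cis = codec.createInputStream(new ByteArrayInputStream(compressed));
    ByteArrayOutputStream plain = new ByteArrayOutputStream();
    try {
      IOUtils.copyBytes(cis, plain, 4096);
    } finally {
      cis.close();
    }
    return plain.toByteArray();
  }
}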


      key.numberRows = bufferedRecords;

      // When compression is enabled, a single compressor/stream pair is created
      // here and reused for every column group in the loop below.
      Compressor compressor = null;
      NonSyncDataOutputBuffer valueBuffer = null;
      CompressionOutputStream deflateFilter = null;
      DataOutputStream deflateOut = null;
      boolean isCompressed = isCompressed();
      int valueLength = 0;
      if (isCompressed) {
        ReflectionUtils.setConf(codec, this.conf);
        compressor = CodecPool.getCompressor(codec);
        valueBuffer = new NonSyncDataOutputBuffer();
        deflateFilter = codec.createOutputStream(valueBuffer, compressor);
        deflateOut = new DataOutputStream(deflateFilter);
      }

      for (int columnIndex = 0; columnIndex < columnNumber; columnIndex++) {
        ColumnBuffer currentBuf = columnBuffers[columnIndex];
        currentBuf.flushGroup();

        NonSyncDataOutputBuffer columnValue = currentBuf.columnValBuffer;
        int colLen;
        int plainLen = columnValuePlainLength[columnIndex];

        if (isCompressed) {
          if (deflateFilter instanceof SchemaAwareCompressionOutputStream) {
            ((SchemaAwareCompressionOutputStream)deflateFilter).
              setColumnIndex(columnIndex);
          }
          // start a fresh compressed block for this column, push the plain column
          // bytes through the deflater, and force them out with finish()
          deflateFilter.resetState();
          deflateOut.write(columnValue.getData(), 0, columnValue.getLength());
          deflateOut.flush();
          deflateFilter.finish();
          // the compressed size of this column is how much the shared buffer grew
          colLen = valueBuffer.getLength() - valueLength;
        } else {
          colLen = columnValuePlainLength[columnIndex];
        }
View Full Code Here
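
The excerpt above (apparently from Hive's RCFile writer) reuses one CompressionOutputStream for all columns: resetState() starts a fresh compressed block per column, finish() forces that block's bytes into the shared buffer, and the column's compressed size is the buffer growth since the previous finish(). The following reduced sketch shows just that pattern; it is illustrative only, with DataOutputBuffer standing in for NonSyncDataOutputBuffer and a hypothetical segments array as input.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class SegmentedCompression {

  // Compress each segment independently into one growing buffer and report
  // how many compressed bytes each segment contributed.
  public static int[] compressSegments(byte[][] segments) throws IOException {
    Configuration conf = new Configuration();
    CompressionCodec codec = ReflectionUtils.newInstance(DefaultCodec.class, conf);
    Compressor compressor = CodecPool.getCompressor(codec);
    DataOutputBuffer buffer = new DataOutputBuffer();
    CompressionOutputStream deflateFilter = codec.createOutputStream(buffer, compressor);
    int[] compressedLengths = new int[segments.length];
    try {
      int written = 0;
      for (int i = 0; i < segments.length; i++) {
        deflateFilter.resetState();                 // begin a new compressed block
        deflateFilter.write(segments[i], 0, segments[i].length);
        deflateFilter.flush();
        deflateFilter.finish();                     // force the block out to the buffer
        compressedLengths[i] = buffer.getLength() - written;
        written = buffer.getLength();
      }
    } finally {
      deflateFilter.close();
      CodecPool.returnCompressor(compressor);
    }
    return compressedLengths;
  }
}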


      if (this.isCompressed()) {
        Compressor compressor = CodecPool.getCompressor(codec);
        NonSyncDataOutputBuffer compressionBuffer =
          new NonSyncDataOutputBuffer();
        CompressionOutputStream deflateFilter =
          codec.createOutputStream(compressionBuffer, compressor);
        DataOutputStream deflateOut = new DataOutputStream(deflateFilter);
        // compress the key, then write its compressed length followed by its bytes
        compressionBuffer.reset();
        deflateFilter.resetState();
        keyBuffer.write(deflateOut);
        deflateOut.flush();
        deflateFilter.finish();
        int compressedKeyLen = compressionBuffer.getLength();
        out.writeInt(compressedKeyLen);
        out.write(compressionBuffer.getData(), 0, compressedKeyLen);
        CodecPool.returnCompressor(compressor);
      } else {
View Full Code Here
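
The excerpt above compresses the key into a buffer, writes the compressed length followed by the compressed bytes, and returns the pooled Compressor. A plausible read-side counterpart (a sketch, not code from the same project; readBlock and its parameters are illustrative names) reverses those steps with a pooled Decompressor:

import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.Decompressor;

public class LengthPrefixedReader {

  // Read one length-prefixed compressed block and return the decompressed bytes.
  public static byte[] readBlock(DataInputStream in, CompressionCodec codec) throws IOException {
    int compressedLen = in.readInt();
    byte[] compressed = new byte[compressedLen];
    in.readFully(compressed);

    DataInputBuffer compressedBuffer = new DataInputBuffer();
    compressedBuffer.reset(compressed, 0, compressedLen);

    Decompressor decompressor = CodecPool.getDecompressor(codec);
    CompressionInputStream inflateIn = codec.createInputStream(compressedBuffer, decompressor);
    ByteArrayOutputStream plain = new ByteArrayOutputStream();
    try {
      byte[] chunk = new byte[4096];
      int n;
      while ((n = inflateIn.read(chunk)) > 0) {
        plain.write(chunk, 0, n);
      }
    } finally {
      inflateIn.close();
      CodecPool.returnDecompressor(decompressor);
    }
    return plain.toByteArray();
  }
}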

        public void writeTestData(File file, int recordCounts, int columnCount,
                String colSeparator) throws IOException {

            // write random test data
            // Note: GzipCodec is Configurable; outside of tests it is usually obtained
            // via ReflectionUtils.newInstance or CompressionCodecFactory so that a
            // Configuration is set before createOutputStream is called.
            GzipCodec gzipCodec = new GzipCodec();
            CompressionOutputStream out = gzipCodec
                    .createOutputStream(new FileOutputStream(file));
            BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(
                    out));

            try {

                for (int r = 0; r < recordCounts; r++) {
                    // foreach row write n columns

                    for (int c = 0; c < columnCount; c++) {

                        if (c != 0) {
                            writer.append(colSeparator);
                        }

                        writer.append(String.valueOf(Math.random()));

                    }
                    writer.append("\n");

                }

            } finally {
                writer.close();
                out.close();
            }

        }
View Full Code Here
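
A hypothetical reader for the gzip-compressed test file written above (readTestData and its parameter names are my own, not part of the original test): createInputStream wraps the FileInputStream exactly as createOutputStream wrapped the FileOutputStream on the write side.

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class TestDataReader {

  // Read back the rows written by writeTestData, splitting on the column separator.
  public static List<String[]> readTestData(File file, String colSeparator) throws IOException {
    GzipCodec gzipCodec = ReflectionUtils.newInstance(GzipCodec.class, new Configuration());
    CompressionInputStream in = gzipCodec.createInputStream(new FileInputStream(file));
    BufferedReader reader = new BufferedReader(new InputStreamReader(in));
    List<String[]> rows = new ArrayList<String[]>();
    try {
      String line;
      while ((line = reader.readLine()) != null) {
        rows.add(line.split(Pattern.quote(colSeparator), -1));
      }
    } finally {
      reader.close();   // also closes the compression stream and the file
    }
    return rows;
  }
}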

   
    LOG.info("Generated " + count + " records");
   
    // Compress data
    DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
    CompressionOutputStream deflateFilter =
      codec.createOutputStream(compressedDataBuffer);
    DataOutputStream deflateOut =
      new DataOutputStream(new BufferedOutputStream(deflateFilter));
    deflateOut.write(data.getData(), 0, data.getLength());
    deflateOut.flush();
    deflateFilter.finish();
    LOG.info("Finished compressing data");
   
    // De-compress data
    DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
    deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
View Full Code Here
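
The excerpt stops in the middle of the decompression setup. One plausible continuation, mirroring the compression side above (a sketch of the likely next lines, not the file's actual code):

    deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
        compressedDataBuffer.getLength());
    CompressionInputStream inflateFilter =
      codec.createInputStream(deCompressedDataBuffer);
    DataInputStream inflateIn =
      new DataInputStream(new BufferedInputStream(inflateFilter));
    // ... read each of the 'count' records back from inflateIn and compare it
    // with the corresponding record in 'data' ...
    LOG.info("Finished de-compressing data");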

         out.write(b.getData(), 0, b.getLength());
    }
   
    public void writeRequest(OutputStream out) throws IOException {
      if (COMPRESS) {
          CompressionOutputStream cos = codec.createOutputStream(out);
          DataOutputStream dos = new DataOutputStream(cos);
          doWriteRequest(dos);
          cos.finish();   // finish, but do not close, the caller's stream
      }
      else {
          DataOutputStream dos = new DataOutputStream(out);
          doWriteRequest(dos);
      }
View Full Code Here
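
Note that the compressed branch above calls finish() but not close(): finish() writes the codec trailer while leaving the caller's OutputStream open, whereas close() would also close that underlying stream. A small self-contained illustration of the difference (codec choice and names are illustrative):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class FinishVersusClose {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    CompressionCodec codec = ReflectionUtils.newInstance(DefaultCodec.class, conf);

    ByteArrayOutputStream underlying = new ByteArrayOutputStream();
    CompressionOutputStream cos = codec.createOutputStream(underlying);
    DataOutputStream dos = new DataOutputStream(cos);

    dos.writeUTF("compressed payload");
    cos.finish();   // trailer written; 'underlying' is still usable

    // The caller can keep writing to the underlying stream after finish();
    // cos.close() would have closed 'underlying' as well.
    new DataOutputStream(underlying).writeUTF("uncompressed footer");
  }
}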

        compressedOutBuffer.reset();
        if (compressor != null) {
          // compressor may be null (e.g. the non-native gzip path), hence the guard;
          // reset() readies a reused compressor for a fresh stream
          compressor.reset();
        }
        CompressionOutputStream cos = codec.createOutputStream(compressedOutBuffer, compressor);
        bytes.writeAllTo(cos);
        cos.finish();
        cos.close();
        compressedBytes = BytesInput.from(compressedOutBuffer);
      }
      return compressedBytes;
    }
View Full Code Here

      out.write(b, off, len);
    }
   
    @Override
    public void flush() throws IOException {
      // finish() makes the codec emit a complete compressed block and resetState()
      // readies it for the next one, so each flush() ends an independently
      // decodable unit on the wrapped stream.
      CompressionOutputStream cout = (CompressionOutputStream) out;
      cout.finish();
      cout.flush();
      cout.resetState();
    }
View Full Code Here
