Package org.apache.hadoop.io.compress

Examples of org.apache.hadoop.io.compress.CompressionInputStream
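A CompressionInputStream wraps a raw InputStream and hands back decompressed bytes as the caller reads. Before the fragments below, here is a minimal, self-contained sketch of the most common way to obtain one: letting CompressionCodecFactory pick a codec from the file extension. The class name and path are placeholders, not taken from any of the examples on this page.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.CompressionInputStream;

public class ReadCompressedFile {               // hypothetical example class
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path path = new Path("/tmp/example.txt.gz");   // placeholder input file

    // The factory maps the .gz extension to GzipCodec (gzip, bzip2 and
    // deflate codecs are registered by default).
    CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(path);
    if (codec == null) {
      throw new IllegalArgumentException("No codec found for " + path);
    }

    // CompressionInputStream decompresses transparently as the file is read.
    try (CompressionInputStream in = codec.createInputStream(fs.open(path));
         BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8"))) {
      String line;
      while ((line = reader.readLine()) != null) {
        System.out.println(line);
      }
    }
  }
}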


            else if (type.equals("gz")) {
               // Instantiate the codec directly instead of going through
               // CompressionCodecFactory; GzipCodec is Configurable and needs a conf.
               codec = new GzipCodec();
               ((GzipCodec) codec).setConf(new Configuration());
            }

            // Wrap the raw file stream so reads return decompressed bytes.
            CompressionInputStream in = codec
                  .createInputStream(new FileInputStream(file));
            int b;
            StringBuilder sb = new StringBuilder();
            while ((b = in.read()) != -1) {
               sb.append((char) b);
            }
            in.close();

            // Assert on the number of tab-separated fields in the decompressed content.
            String[] fields = sb.toString().split("\\t");
            assertEquals(3, fields.length);
            String id = indexFolder.substring(1, 2);
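Appending each byte as a char, as above, is fine for an ASCII test fixture but silently mangles multi-byte text. A small variant that decodes through a Reader with an explicit charset; the helper name is hypothetical, and it needs the usual java.io, java.nio.charset.StandardCharsets and org.apache.hadoop.io.compress imports:

static String readCompressedAsString(CompressionCodec codec, File file) throws IOException {
  // Decode the decompressed bytes with an explicit charset instead of
  // casting individual bytes to chars.
  try (CompressionInputStream cin = codec.createInputStream(new FileInputStream(file));
       Reader reader = new InputStreamReader(cin, StandardCharsets.UTF_8)) {
    StringBuilder sb = new StringBuilder();
    char[] buf = new char[4096];
    for (int n; (n = reader.read(buf)) != -1; ) {
      sb.append(buf, 0, n);
    }
    return sb.toString();
  }
}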


   * @return the decompressed contents of the BSPMessageBundle.
   */
  @Override
  public byte[] decompress(byte[] compressedBytes) {
    ByteArrayInputStream bis = null;
    CompressionInputStream sis = null;
    DataInputStream dis = null;
    byte[] bytes = null;

    try {
      bis = new ByteArrayInputStream(compressedBytes);
      // The codec inflates the bytes transparently as they are read.
      sis = codec.createInputStream(bis);
      dis = new DataInputStream(sis);
      bytes = IOUtils.toByteArray(dis);
    } catch (IOException ioe) {
      LOG.error("Unable to decompress.", ioe);
    } finally {
      // Guard against streams that were never opened because an earlier step failed.
      try {
        if (dis != null) dis.close();
        if (sis != null) sis.close();
        if (bis != null) bis.close();
      } catch (IOException e) {
        LOG.warn("Failed to close decompression streams.", e);
      }
    }
    return bytes;
  }
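The same method can be written with try-with-resources, which drops the manual null checks in the finally block. A sketch of an equivalent implementation, assuming the same codec field and the Apache Commons IO IOUtils used above:

@Override
public byte[] decompress(byte[] compressedBytes) {
  // try-with-resources closes the wrapped streams in reverse order.
  try (CompressionInputStream sis =
           codec.createInputStream(new ByteArrayInputStream(compressedBytes));
       DataInputStream dis = new DataInputStream(sis)) {
    return IOUtils.toByteArray(dis);
  } catch (IOException ioe) {
    LOG.error("Unable to decompress.", ioe);
    return null;
  }
}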

      CompressionCodecFactory compressionCodecs =
        new CompressionCodecFactory(conf);
      CompressionCodec codec = compressionCodecs.getCodec(file);
      if (codec != null) {
        // Borrow a pooled decompressor for this codec.
        Decompressor decompressor = CodecPool.getDecompressor(codec);
        if (decompressor != null) {
          CompressionInputStream in =
            codec.createInputStream(fs.open(file), decompressor);
          // TODO Seek doesn't work with a compressed input stream.
          //      Use SplittableCompressionCodec?
          return in;
        }
      }
    }
    // Not compressed (or no codec found): open the raw stream and seek to the offset.
    FSDataInputStream in = fs.open(file);
    in.seek(offset);
    return in;
  }
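The TODO above can be addressed with SplittableCompressionCodec (implemented by BZip2Codec, for example), which can position a compressed stream at an arbitrary split. A sketch under the assumption that the codec actually supports it; `end` is an assumed split-end variable that does not appear in the fragment:

if (codec instanceof SplittableCompressionCodec) {
  // A splittable codec can snap the stream to the compression-block
  // boundaries surrounding [offset, end).
  SplitCompressionInputStream cin =
      ((SplittableCompressionCodec) codec).createInputStream(
          fs.open(file), decompressor, offset, end,
          SplittableCompressionCodec.READ_MODE.BYBLOCK);
  // cin.getAdjustedStart() / cin.getAdjustedEnd() report the boundaries
  // the stream actually moved to.
  return cin;
}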

      // Despite the name, deCompressedDataBuffer is fed the *compressed* bytes;
      // the BlockDecompressorStream below inflates them as they are read.
      DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
      deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
          compressedDataBuffer.getLength());

      // Block-oriented decompression with a direct-buffer Snappy decompressor.
      CompressionInputStream inflateFilter = new BlockDecompressorStream(
          deCompressedDataBuffer, new SnappyDecompressor(bufferSize),
          bufferSize);

      inflateIn = new DataInputStream(new BufferedInputStream(inflateFilter));

      // Same pattern as the Snappy fragment above, but with an LZ4 decompressor.
      DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
      deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
          compressedDataBuffer.getLength());

      CompressionInputStream inflateFilter = new BlockDecompressorStream(
          deCompressedDataBuffer, new Lz4Decompressor(bufferSize), bufferSize);

      inflateIn = new DataInputStream(new BufferedInputStream(inflateFilter));

      byte[] result = new byte[BYTE_SIZE];
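Both fragments only show the read side. The compressedDataBuffer they consume is typically filled by the matching BlockCompressorStream; a write-side sketch for the Snappy case, assuming a bufferSize and some rawBytes to compress, and that the native Snappy library is available:

DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
int compressionOverhead = (bufferSize / 6) + 32;   // conventional overhead estimate
CompressionOutputStream deflateFilter = new BlockCompressorStream(
    compressedDataBuffer, new SnappyCompressor(bufferSize), bufferSize,
    compressionOverhead);
DataOutputStream deflateOut =
    new DataOutputStream(new BufferedOutputStream(deflateFilter));
deflateOut.write(rawBytes, 0, rawBytes.length);
deflateOut.flush();
deflateFilter.finish();
// compressedDataBuffer.getData() / getLength() now hold the block-compressed
// bytes that the BlockDecompressorStream fragments above read back.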

      compressedKeyLen = in.readInt();
      if (decompress) {
        // Copy the compressed key bytes out of the stream into a temp buffer.
        keyTempBuffer.reset();
        keyTempBuffer.write(in, compressedKeyLen);
        keyDecompressBuffer.reset(keyTempBuffer.getData(), compressedKeyLen);
        // Reuse the pooled decompressor; resetState() clears any state left
        // over from the previous key block.
        CompressionInputStream deflateFilter = codec.createInputStream(
            keyDecompressBuffer, keyDecompressor);
        DataInputStream compressedIn = new DataInputStream(deflateFilter);
        deflateFilter.resetState();
        // Inflate into keyDecompressedData and deserialize the key from it.
        keyDecompressedData.reset();
        keyDecompressedData.write(compressedIn, currentKeyLength);
        keyDataIn.reset(keyDecompressedData.getData(), currentKeyLength);
        currentKey.readFields(keyDataIn);
      } else {

        if (codec == null) {
            // Plain text file: no decompression needed.
            return new BufferedReader(new FileReader(file));
        } else {
            // Borrow a pooled decompressor and wrap the file stream with it.
            Decompressor decompressor = CodecPool.getDecompressor(codec);
            FileInputStream fis = new FileInputStream(file);
            CompressionInputStream cis = codec.createInputStream(fis, decompressor);
            // BufferedReaderExt (a custom subclass) keeps the decompressor,
            // presumably so it can be returned to the CodecPool on close().
            BufferedReader br = new BufferedReaderExt(new InputStreamReader(cis), decompressor);
            return br;
        }
    }
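BufferedReaderExt itself is not shown on this page; judging only from the constructor arguments above, its job is most likely to hand the pooled decompressor back when the reader is closed. A hypothetical sketch of such a class (an assumption, not the original implementation):

class BufferedReaderExt extends BufferedReader {   // hypothetical reconstruction
  private final Decompressor decompressor;

  BufferedReaderExt(Reader in, Decompressor decompressor) {
    super(in);
    this.decompressor = decompressor;
  }

  @Override
  public void close() throws IOException {
    try {
      super.close();
    } finally {
      // Return the borrowed decompressor so the CodecPool can reuse it.
      if (decompressor != null) {
        CodecPool.returnDecompressor(decompressor);
      }
    }
  }
}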

            {
              CompressionCodecFactory compressionCodecs = new CompressionCodecFactory(job);
              final CompressionCodec codec = compressionCodecs.getCodec(file);
              if (codec != null) {
                // Compressed input cannot be split, so read through to the end of the stream.
                end = Long.MAX_VALUE;
                CompressionInputStream stream = codec.createInputStream(fileIn);
                this.xmlLoaderBPIS = new XMLLoaderBufferedPositionedInputStream(stream, start, end);
              }
            }

            else