Examples of CompressionInputStream


Examples of org.apache.hadoop.io.compress.CompressionInputStream

            else if (type.equals("gz")) {
               codec = new GzipCodec();
               ((GzipCodec)codec).setConf(new Configuration());
            }

            CompressionInputStream createInputStream = codec
                  .createInputStream(new FileInputStream(file));
            int b;
            StringBuffer sb = new StringBuffer();
            while ((b = createInputStream.read()) != -1) {
               sb.append((char) b);
            }
            createInputStream.close();

            // Assert for the number of fields and keys.
            String[] fields = sb.toString().split("\\t");
            assertEquals(3, fields.length);
            String id = indexFolder.substring(1,2);
View Full Code Here

Examples of org.apache.hadoop.io.compress.CompressionInputStream

        if(codec == null) {
            return new BufferedReader(new FileReader(file));
        } else {
            Decompressor decompressor = CodecPool.getDecompressor(codec);
            FileInputStream fis = new FileInputStream(file);
            CompressionInputStream cis = codec.createInputStream(fis, decompressor);
            BufferedReader br = new BufferedReaderExt(new InputStreamReader(cis), decompressor);
            return br;
        }
    }
View Full Code Here

Examples of org.apache.hadoop.io.compress.CompressionInputStream

            {
              CompressionCodecFactory compressionCodecs =  new CompressionCodecFactory(job);
              final CompressionCodec codec = compressionCodecs.getCodec(file);
               if (codec != null) {
                  end = Long.MAX_VALUE;
                    CompressionInputStream stream = codec.createInputStream(fileIn);
                    this.xmlLoaderBPIS = new XMLLoaderBufferedPositionedInputStream(stream,start,end);
                  }
            }
           
            else
View Full Code Here

Examples of org.apache.hadoop.io.compress.CompressionInputStream

      compressedKeyLen = in.readInt();
      if (decompress) {
        keyTempBuffer.reset();
        keyTempBuffer.write(in, compressedKeyLen);
        keyDecompressBuffer.reset(keyTempBuffer.getData(), compressedKeyLen);
        CompressionInputStream deflatFilter = codec.createInputStream(
            keyDecompressBuffer, keyDecompressor);
        DataInputStream compressedIn = new DataInputStream(deflatFilter);
        deflatFilter.resetState();
        keyDecompressedData.reset();
        keyDecompressedData.write(compressedIn, currentKeyLength);
        keyDataIn.reset(keyDecompressedData.getData(), currentKeyLength);
        currentKey.readFields(keyDataIn);
      } else {
View Full Code Here

Examples of org.apache.hadoop.io.compress.CompressionInputStream

      compressedKeyLen = in.readInt();
      if (decompress) {
        keyTempBuffer.reset();
        keyTempBuffer.write(in, compressedKeyLen);
        keyDecompressBuffer.reset(keyTempBuffer.getData(), compressedKeyLen);
        CompressionInputStream deflatFilter = codec.createInputStream(
            keyDecompressBuffer, keyDecompressor);
        DataInputStream compressedIn = new DataInputStream(deflatFilter);
        deflatFilter.resetState();
        keyDecompressedData.reset();
        keyDecompressedData.write(compressedIn, currentKeyLength);
        keyDataIn.reset(keyDecompressedData.getData(), currentKeyLength);
        currentKey.readFields(keyDataIn);
      } else {
View Full Code Here

Examples of org.apache.hadoop.io.compress.CompressionInputStream

      compressedKeyLen = in.readInt();
      if (decompress) {
        keyTempBuffer.reset();
        keyTempBuffer.write(in, compressedKeyLen);
        keyDecompressBuffer.reset(keyTempBuffer.getData(), compressedKeyLen);
        CompressionInputStream deflatFilter = codec.createInputStream(
            keyDecompressBuffer, keyDecompressor);
        DataInputStream compressedIn = new DataInputStream(deflatFilter);
        deflatFilter.resetState();
        keyDecompressedData.reset();
        keyDecompressedData.write(compressedIn, currentKeyLength);
        keyDataIn.reset(keyDecompressedData.getData(), currentKeyLength);
        currentKey.readFields(keyDataIn);
      } else {
View Full Code Here

Examples of org.apache.hadoop.io.compress.CompressionInputStream

    CompressionCodec codec = new CompressionCodecFactory(job).getCodec(file);
    if (null != codec) {
      isCompressedInput = true
      decompressor = CodecPool.getDecompressor(codec);
      CompressionInputStream cIn
          = codec.createInputStream(fileIn, decompressor);
      filePosition = cIn;
      inputStream = cIn;
      numRecordsRemainingInSplit = Long.MAX_VALUE;
      LOG.info(
View Full Code Here

Examples of org.apache.hadoop.io.compress.CompressionInputStream

        new CompressionCodecFactory(conf);
      CompressionCodec codec = compressionCodecs.getCodec(file);
      if (codec != null) {
        Decompressor decompressor = CodecPool.getDecompressor(codec);
        if (decompressor != null) {
          CompressionInputStream in =
            codec.createInputStream(fs.open(file), decompressor);
          //TODO Seek doesnt work with compressed input stream.
          //     Use SplittableCompressionCodec?
          return (InputStream)in;
        }
      }
    }
    FSDataInputStream in = fs.open(file);
    in.seek(offset);
    return (InputStream)in;
  }
View Full Code Here

Examples of org.apache.hadoop.io.compress.CompressionInputStream

  private void readFromHdfs(ILineSender lineSender) {
    FSDataInputStream in = null;
    CompressionCodecFactory factory;
    CompressionCodec codec;
    CompressionInputStream cin = null;
    LineIterator itr = null;
    try {
      conf = DFSUtils.getConf(filePath, null);
      fs = DFSUtils.createFileSystem(new URI(filePath), conf);
      in = fs.open(new Path(filePath));
      factory = new CompressionCodecFactory(conf);
      codec = factory.getCodec(new Path(filePath));
      if (codec == null) {
        LOG.info("codec not found, using text file reader");
        itr = new LineIterator(new BufferedReader(
            new InputStreamReader(in)));
      } else {
        LOG.info("found code " + codec.getClass());
        cin = codec.createInputStream(in);
        itr = new LineIterator(new BufferedReader(
            new InputStreamReader(cin)));
      }
      while (itr.hasNext()) {
        ILine oneLine = lineSender.createNewLine();
        String line = itr.nextLine();
        String[] parts = StringUtils
            .splitByWholeSeparatorPreserveAllTokens(line,
                FIELD_SEPARATOR);
        for (int i = 0; i < parts.length; i++) {
          if (HIVE_COLUMN_NULL_VALUE.equals(parts[i])) {
            oneLine.addField(null, i);
          } else {
            oneLine.addField(parts[i], i);
          }
        }
        boolean flag = lineSender.send(oneLine);
        if (flag) {
          getMonitor().increaseSuccessLines();
        } else {
          getMonitor().increaseFailedLines();
          LOG.debug("failed to send line: " + oneLine.toString('\t'));
        }
      }
      lineSender.flush();

    } catch (Exception e) {
      LOG.error(e.getCause());
      throw new WormholeException(e,
          JobStatus.READ_DATA_EXCEPTION.getStatus());
    } finally {
      if (itr != null) {
        itr.close();
      }
      try {
        if (cin != null) {
          cin.close();
        }
        if (in != null) {
          in.close();
        }
        if (fs != null) {
View Full Code Here

Examples of org.apache.hadoop.io.compress.CompressionInputStream

          
            cout.write(inmsg);
            cout.flush();

            ByteArrayInputStream bin = new ByteArrayInputStream(bout.toByteArray());
            CompressionInputStream cin = c.createInputStream(bin);

         
            int totaln = 0;

            while (cin.available() > 0)
            {
                int n = cin.read(buffer);
                if (n < 0)
                    break;

                try
                {
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle, Inc. Contact coftware#gmail.com.