Package org.apache.hadoop.io.compress

Examples of org.apache.hadoop.io.compress.CompressionCodecFactory

CompressionCodecFactory maps file names to the compression codecs registered
in a Hadoop Configuration (configured through the io.compression.codecs
property), matching on the file's extension. The snippets below, gathered
from several Hadoop-ecosystem projects, show the recurring patterns: look a
codec up with getCodec(Path) or getCodecByClassName(String), then wrap a raw
stream with createInputStream or createOutputStream.
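As a self-contained starting point, here is a minimal read-side sketch; the
class name ReadCompressed and the fallback file name input.gz are
illustrative, not taken from the examples below:

    import java.io.BufferedReader;
    import java.io.InputStream;
    import java.io.InputStreamReader;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.CompressionCodecFactory;

    public class ReadCompressed {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path path = new Path(args.length > 0 ? args[0] : "input.gz");
        FileSystem fs = path.getFileSystem(conf);

        // The factory matches the file extension (.gz, .bz2, ...) against
        // the registered codecs and returns null for an unrecognized suffix.
        CompressionCodecFactory factory = new CompressionCodecFactory(conf);
        CompressionCodec codec = factory.getCodec(path);

        InputStream in = fs.open(path);
        if (codec != null) {
          in = codec.createInputStream(in); // decompress transparently
        }
        try (BufferedReader reader =
            new BufferedReader(new InputStreamReader(in))) {
          String line;
          while ((line = reader.readLine()) != null) {
            System.out.println(line);
          }
        }
      }
    }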


From a test utility that writes a record file, optionally gzip-compressed
(the write-side pattern):

    FileSystem fs = FileSystem.get(conf);
    fs.mkdirs(tablePath);
    OutputStream os = fs.create(filePath);
    if (gzip) {
      // filePath presumably ends in ".gz" here, so the factory hands back
      // GzipCodec and the raw stream is wrapped to write compressed records.
      CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
      CompressionCodec codec = ccf.getCodec(filePath);
      os = codec.createOutputStream(os);
    }
    BufferedWriter w = new BufferedWriter(new OutputStreamWriter(os));
    for (int i = 0; i < numRecords; i++) {
      w.write(getRecordLine(startId + i, extraCols));
    }
    w.close();
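The same write-side pattern as a standalone sketch; the destination name
output.gz is an assumption chosen so that the factory resolves GzipCodec:

    Configuration conf = new Configuration();
    Path out = new Path("output.gz"); // hypothetical destination
    FileSystem fs = out.getFileSystem(conf);

    CompressionCodecFactory factory = new CompressionCodecFactory(conf);
    CompressionCodec codec = factory.getCodec(out); // null for unknown suffix

    OutputStream os = fs.create(out);
    if (codec != null) {
      os = codec.createOutputStream(os); // all writes below are compressed
    }
    try (BufferedWriter w = new BufferedWriter(new OutputStreamWriter(os))) {
      w.write("id,value\n");
    }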


From Sqoop's tests, sanity-checking that an import really produced a gzip
file; note the pooled Decompressor borrowed from CodecPool:

    if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
      conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
    }
    FileSystem fs = FileSystem.get(conf);
    InputStream is = fs.open(f);
    CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
    CompressionCodec codec = ccf.getCodec(f);
    LOG.info("gzip check codec is " + codec);
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (null == decompressor) {
      LOG.info("Verifying gzip sanity with null decompressor");
    } else {
      LOG.info("Verifying gzip sanity with decompressor: " + decompressor);
    }
    is = codec.createInputStream(is, decompressor);
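CodecPool shows up here because compressors and decompressors can be costly
to allocate; the usual borrow/use/return discipline looks like this (conf,
fs and path stand in for whatever the caller already has):

    CompressionCodecFactory factory = new CompressionCodecFactory(conf);
    CompressionCodec codec = factory.getCodec(path);
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    try {
      InputStream in = codec.createInputStream(fs.open(path), decompressor);
      // ... consume the stream ...
      in.close();
    } finally {
      CodecPool.returnDecompressor(decompressor); // hand it back to the pool
    }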

An InputFormat override that asks the factory whether the input file is
compressed in order to decide whether it can be split:

  @Override
  protected boolean isSplitable(JobContext context, Path file) {
    final CompressionCodec codec =
      new CompressionCodecFactory(context.getConfiguration()).getCodec(file);
    if (null == codec) {
      // No codec matched the extension: plain text, splittable anywhere.
      return true;
    }

    // Once we remove support for Hadoop < 2.0 we could check for
    // SplittableCompressionCodec here (bzip2 is splittable, for example);
    // for now any compressed input is treated as a single split.
    return false;
  }

A record-reader constructor that builds the factory once, up front, and
keeps it for the files of a combined split:

   * @throws IOException
   */
  public FileCollectionRecordReader(JobConf jobConf,
      PositionAwareSplit<CombineFileSplit> split) throws IOException {
    super(jobConf, split);
    // config is presumably a field initialized by the superclass constructor.
    compressionCodecs = new CompressionCodecFactory(config);
  }

From a shell-style command: a helper that opens a file and wraps it with the
codec implied by its extension, refusing files it cannot recognize:

  private InputStream decompress(Path p, FileSystem srcFs) throws IOException {
    CompressionCodecFactory factory = new CompressionCodecFactory(getConf());
    CompressionCodec codec = factory.getCodec(p);
    if (codec == null) {
      // Check before opening so no stream is leaked on failure.
      throw new IOException("Cannot find codec for " + p);
    }
    InputStream in = srcFs.open(p);
    return codec.createInputStream(in);
  }

From a shell command that compresses to a destination path: the
destination's file name selects the codec, and an unknown extension is
reported as a usage error:

    FileSystem srcFs = srcPath.getFileSystem(getConf());
    Path dstPath = new Path(dstf);
    FileSystem dstFs = dstPath.getFileSystem(getConf());

    // Create the codec from the destination name (e.g. "out.gz" -> gzip).
    CompressionCodecFactory factory = new CompressionCodecFactory(conf);
    CompressionCodec codec = factory.getCodec(dstPath);
    if (codec == null) {
      System.err.println(cmd.substring(1) + ": cannot find compression codec for "
          + dstf);
      return 1;
    }

From HDFS's FSImageCompression: here the codec is resolved by class name
rather than by file extension:

  /**
   * Create a compression instance from the codec class name given in the
   * configuration.
   */
  private static FSImageCompression createCompression(Configuration conf,
                                                      String codecClassName)
    throws IOException {

    CompressionCodecFactory factory = new CompressionCodecFactory(conf);
    CompressionCodec codec = factory.getCodecByClassName(codecClassName);
    if (codec == null) {
      throw new IOException("Not a supported codec: " + codecClassName);
    }

    return new FSImageCompression(codec);
  }
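getCodecByClassName is the right lookup when the codec is recorded as a
string, in a configuration value or a file header, rather than implied by a
file name. A minimal sketch:

    Configuration conf = new Configuration();
    CompressionCodecFactory factory = new CompressionCodecFactory(conf);
    // Returns null unless the named class is a registered codec.
    CompressionCodec codec =
        factory.getCodecByClassName("org.apache.hadoop.io.compress.GzipCodec");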

From the HDFS offline image viewer: the image header stores a flag and, when
set, the codec's class name, which the factory turns back into a codec for
reading the rest of the image:

        boolean isCompressed = in.readBoolean();
        v.visit(ImageElement.IS_COMPRESSED, String.valueOf(isCompressed));
        if (isCompressed) {
          String codecClassName = Text.readString(in);
          v.visit(ImageElement.COMPRESS_CODEC, codecClassName);
          CompressionCodecFactory codecFac = new CompressionCodecFactory(
              new Configuration());
          CompressionCodec codec = codecFac.getCodecByClassName(codecClassName);
          if (codec == null) {
            throw new IOException("Image compression codec not supported: "
                + codecClassName);
          }
          // Re-wrap the stream so subsequent reads are decompressed.
          in = new DataInputStream(codec.createInputStream(in));
        }

From a getSplits implementation (Hive, judging by the identifiers): scan a
directory tree breadth-first and fall back to the superclass as soon as any
file matches a codec, i.e. as soon as any input turns out to be compressed:

          // If path is a directory, queue it for the scan below.
          if (fStats.isDir()) {
            dirs.offer(path);
          }
          // A non-null codec means the file is compressed.
          else if ((new CompressionCodecFactory(job)).getCodec(path) != null) {
            return super.getSplits(job, numSplits);
          }

          while (dirs.peek() != null) {
            Path tstPath = dirs.remove();
            FileStatus[] fStatus = inpFs.listStatus(tstPath);
            for (int idx = 0; idx < fStatus.length; idx++) {
              if (fStatus[idx].isDir()) {
                dirs.offer(fStatus[idx].getPath());
              }
              // Constructing one factory outside the loop would be cheaper,
              // but the check is the same codec lookup as above.
              else if ((new CompressionCodecFactory(job)).getCodec(fStatus[idx].getPath()) != null) {
                return super.getSplits(job, numSplits);
              }
            }
          }

From a Tajo test: every file the query wrote is probed through the factory
to assert it was compressed with DeflateCodec:

    TableDesc desc = catalog.getTableDesc(tableName);
    assertEquals(2, desc.getMeta().getStat().getNumRows().intValue());

    FileSystem fs = FileSystem.get(tpch.getTestingCluster().getConfiguration());
    assertTrue(fs.exists(desc.getPath()));
    CompressionCodecFactory factory = new CompressionCodecFactory(tpch.getTestingCluster().getConfiguration());

    for (FileStatus file : fs.listStatus(desc.getPath())) {
      // getCodec keys off the file extension, so this asserts the suffix too.
      CompressionCodec codec = factory.getCodec(file.getPath());
      assertTrue(codec instanceof DeflateCodec);
    }
  }
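When writing files that must be recognized later, the codec's default
extension keeps names and factory lookups in agreement; a small sketch (the
part-00000 name is illustrative):

    CompressionCodec codec = new CompressionCodecFactory(conf)
        .getCodecByClassName("org.apache.hadoop.io.compress.DefaultCodec");
    // DefaultCodec's extension is ".deflate"; appending it lets getCodec()
    // rediscover the codec from the file name alone.
    String fileName = "part-00000" + codec.getDefaultExtension();
    // And the static helper strips a known suffix back off:
    String base = CompressionCodecFactory.removeSuffix(fileName,
        codec.getDefaultExtension());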
