Package org.apache.hadoop.io.compress

Examples of org.apache.hadoop.io.compress.CompressionCodec
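
A minimal, self-contained sketch of the basic CompressionCodec write path, for orientation before the collected snippets. The codec choice (GzipCodec), output path, and class name are illustrative assumptions, not taken from any of the examples below:

  import java.io.OutputStream;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.compress.CompressionCodec;
  import org.apache.hadoop.io.compress.GzipCodec;
  import org.apache.hadoop.util.ReflectionUtils;

  public class CodecWriteSketch {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      FileSystem fs = FileSystem.get(conf);

      // Instantiate the codec via ReflectionUtils so it is configured with conf.
      CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);

      // Name the file with the codec's default extension (".gz" for GzipCodec).
      Path out = new Path("/tmp/codec-sketch" + codec.getDefaultExtension());

      // Wrap the raw file stream in a compressing stream and write through it.
      OutputStream cout = codec.createOutputStream(fs.create(out, true));
      try {
        cout.write("hello, compressed world".getBytes("UTF-8"));
      } finally {
        cout.close();
      }
    }
  }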


      PerformTestRCFileAndSeqFile testcase = new PerformTestRCFileAndSeqFile(
          isLocalFS, file);

      // change these parameters
      boolean checkCorrect = true;
      CompressionCodec codec = new DefaultCodec();
      testcase.columnMaxSize = 30;

      // testcase.testWithColumnNumber(count, 2, checkCorrect, codec);
      // testcase.testWithColumnNumber(count, 10, checkCorrect, codec);
      // testcase.testWithColumnNumber(count, 25, checkCorrect, codec);


          Arrays.equals(outbytes, out.toByteArray()));
      out.reset();

      // Test a simple codec with extension-based detection. We use
      // BZip2 because it is non-native.
      CompressionCodec codec = (CompressionCodec)
          ReflectionUtils.newInstance(BZip2Codec.class, conf);
      String extension = codec.getDefaultExtension();
      Path p = new Path(root, "file." + extension);
      OutputStream fout = new DataOutputStream(codec.createOutputStream(
          fs.create(p, true)));
      byte[] writebytes = "foo".getBytes();
      fout.write(writebytes);
      fout.close();
      out = new ByteArrayOutputStream();
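
The write side above names the file with the codec's default extension; the complementary read side resolves the codec from that extension with CompressionCodecFactory. A minimal sketch under that assumption (the helper class and method names are made up for illustration):

  import java.io.ByteArrayOutputStream;
  import java.io.InputStream;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.IOUtils;
  import org.apache.hadoop.io.compress.CompressionCodec;
  import org.apache.hadoop.io.compress.CompressionCodecFactory;

  public class CodecReadBack {
    /** Reads a possibly compressed file, picking the codec from the file extension. */
    public static byte[] readMaybeCompressed(Configuration conf, Path p) throws Exception {
      FileSystem fs = p.getFileSystem(conf);
      CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(p);

      // Fall back to the raw stream when no codec matches the extension.
      InputStream in = (codec == null)
          ? fs.open(p)
          : codec.createInputStream(fs.open(p));

      ByteArrayOutputStream bytes = new ByteArrayOutputStream();
      try {
        IOUtils.copyBytes(in, bytes, conf, false);
      } finally {
        in.close();
      }
      return bytes.toByteArray();
    }
  }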

  public static OutputStream createCompressedStream(JobConf jc, OutputStream out,
                                                     boolean isCompressed)
    throws IOException {
    if(isCompressed) {
      Class<? extends CompressionCodec> codecClass =
        FileOutputFormat.getOutputCompressorClass(jc, DefaultCodec.class);
      CompressionCodec codec = (CompressionCodec)
        ReflectionUtils.newInstance(codecClass, jc);
      return codec.createOutputStream(out);
    } else {
      return (out);
    }
  }
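
getOutputCompressorClass(jc, DefaultCodec.class) only returns something other than the DefaultCodec fallback when an output compression codec has been set on the job. A hedged sketch of that configuration; the GzipCodec choice is an illustrative assumption:

  import org.apache.hadoop.io.compress.GzipCodec;
  import org.apache.hadoop.mapred.FileOutputFormat;
  import org.apache.hadoop.mapred.JobConf;

  public class ConfigureOutputCompression {
    public static JobConf newCompressedJobConf() {
      JobConf jc = new JobConf();
      // Enable output compression and select the codec; getOutputCompressorClass()
      // in the snippet above would then return GzipCodec instead of DefaultCodec.
      FileOutputFormat.setCompressOutput(jc, true);
      FileOutputFormat.setOutputCompressorClass(jc, GzipCodec.class);
      return jc;
    }
  }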

    if(!isCompressed) {
      return "";
    } else {
      Class<? extends CompressionCodec> codecClass =
        FileOutputFormat.getOutputCompressorClass(jc, DefaultCodec.class);
      CompressionCodec codec = (CompressionCodec)
        ReflectionUtils.newInstance(codecClass, jc);
      return codec.getDefaultExtension();
    }
  }

  public static SequenceFile.Writer createSequenceWriter(JobConf jc, FileSystem fs,
                                                         Path file, Class<?> keyClass,
                                                         Class<?> valClass,
                                                         boolean isCompressed)
    throws IOException {
    CompressionCodec codec = null;
    CompressionType compressionType = CompressionType.NONE;
    Class codecClass = null;
    if (isCompressed) {
      compressionType = SequenceFileOutputFormat.getOutputCompressionType(jc);
      codecClass = SequenceFileOutputFormat.getOutputCompressorClass(jc, DefaultCodec.class);
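
The snippet is cut off before the writer is actually created. A hedged sketch of how a codec and compression type typically feed into SequenceFile.createWriter; the block compression type, DefaultCodec, and BytesWritable key/value classes are illustrative assumptions, and the exact overload used by the original code is not shown above:

  import java.io.IOException;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.BytesWritable;
  import org.apache.hadoop.io.SequenceFile;
  import org.apache.hadoop.io.SequenceFile.CompressionType;
  import org.apache.hadoop.io.compress.CompressionCodec;
  import org.apache.hadoop.io.compress.DefaultCodec;
  import org.apache.hadoop.util.ReflectionUtils;

  public class SequenceWriterSketch {
    public static SequenceFile.Writer open(Configuration conf, FileSystem fs, Path file)
        throws IOException {
      CompressionType compressionType = CompressionType.BLOCK;
      CompressionCodec codec = ReflectionUtils.newInstance(DefaultCodec.class, conf);
      // Pass the key/value classes plus the chosen compression type and codec.
      return SequenceFile.createWriter(fs, conf, file,
          BytesWritable.class, BytesWritable.class, compressionType, codec);
    }
  }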

    public FlatFileRecordReader(Configuration conf,
                                FileSplit split) throws IOException {
      final Path path = split.getPath();
      FileSystem fileSys = path.getFileSystem(conf);
      CompressionCodecFactory compressionCodecs = new CompressionCodecFactory(conf);
      final CompressionCodec codec = compressionCodecs.getCodec(path);
      this.conf = conf;

      fsin = fileSys.open(path);
      if (codec != null) {
        dcin = codec.createInputStream(fsin);
        in = new DataInputStream(dcin);
      } else {
        dcin = null;
        in = fsin;
      }
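
Record readers that open many compressed splits commonly borrow Decompressor instances from CodecPool rather than letting each codec allocate a fresh one. This is a minimal sketch of that pattern, not part of the FlatFileRecordReader shown above; the class and method names are made up for illustration:

  import java.io.InputStream;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.compress.CodecPool;
  import org.apache.hadoop.io.compress.CompressionCodec;
  import org.apache.hadoop.io.compress.CompressionCodecFactory;
  import org.apache.hadoop.io.compress.Decompressor;

  public class PooledDecompressorSketch {
    public static void readWithPooledDecompressor(Configuration conf, Path path) throws Exception {
      FileSystem fs = path.getFileSystem(conf);
      CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(path);
      if (codec == null) {
        return; // not compressed, nothing to demonstrate
      }
      // Borrow a decompressor from the pool and hand it to the codec.
      Decompressor decompressor = CodecPool.getDecompressor(codec);
      InputStream in = codec.createInputStream(fs.open(path), decompressor);
      try {
        // ... consume the stream ...
      } finally {
        in.close();
        // Always return the decompressor so it can be reused.
        CodecPool.returnDecompressor(decompressor);
      }
    }
  }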

    pools.add(multi);
  }
 
  @Override
  protected boolean isSplitable(JobContext context, Path file) {
    final CompressionCodec codec =
      new CompressionCodecFactory(context.getConfiguration()).getCodec(file);
    if (null == codec) {
      return true;
    }
    return codec instanceof SplittableCompressionCodec;
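
Of the codecs bundled with Hadoop, BZip2Codec is the one that implements SplittableCompressionCodec; gzip and DEFLATE (DefaultCodec) streams are not splittable, so for those files this check makes the framework hand the whole file to a single mapper as one split.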

  public static OutputStream createCompressedStream(JobConf jc, OutputStream out,
      boolean isCompressed) throws IOException {
    if (isCompressed) {
      Class<? extends CompressionCodec> codecClass = FileOutputFormat.getOutputCompressorClass(jc,
          DefaultCodec.class);
      CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, jc);
      return codec.createOutputStream(out);
    } else {
      return (out);
    }
  }

      return extension;
    }
    if ((hiveOutputFormat instanceof HiveIgnoreKeyTextOutputFormat) && isCompressed) {
      Class<? extends CompressionCodec> codecClass = FileOutputFormat.getOutputCompressorClass(jc,
          DefaultCodec.class);
      CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, jc);
      return codec.getDefaultExtension();
    }
    return "";
  }

   *          Java Class for value
   * @return a SequenceFile.Writer for the created sequence file
   */
  public static SequenceFile.Writer createSequenceWriter(JobConf jc, FileSystem fs, Path file,
      Class<?> keyClass, Class<?> valClass, boolean isCompressed) throws IOException {
    CompressionCodec codec = null;
    CompressionType compressionType = CompressionType.NONE;
    Class codecClass = null;
    if (isCompressed) {
      compressionType = SequenceFileOutputFormat.getOutputCompressionType(jc);
      codecClass = FileOutputFormat.getOutputCompressorClass(jc, DefaultCodec.class);
