Package: org.apache.avro.file

Examples of org.apache.avro.file.CodecFactory


    // Wrap the generic datum writer in an Avro container-file writer.
    DataFileWriter<GenericRecord> dfw = new DataFileWriter<GenericRecord>(gdw);

    if (isCompressed) {
      // Deflate is special-cased because it takes an explicit compression
      // level; every other codec is resolved by name via fromString().
      int level = jobConf.getInt(DEFLATE_LEVEL_KEY, DEFAULT_DEFLATE_LEVEL);
      String codecName = jobConf.get(OUTPUT_CODEC, DEFLATE_CODEC);
      CodecFactory factory = codecName.equals(DEFLATE_CODEC)
          ? CodecFactory.deflateCodec(level)
          : CodecFactory.fromString(codecName);
      dfw.setCodec(factory);
    }
View Full Code Here


      JobConf job) throws UnsupportedEncodingException {
    // Only configure a codec when compressed output was requested for the job.
    if (FileOutputFormat.getCompressOutput(job)) {
      int level = job.getInt(DEFLATE_LEVEL_KEY,
          DEFAULT_DEFLATE_LEVEL);
      String codecName = job.get(AvroJob.OUTPUT_CODEC, DEFLATE_CODEC);
      // Deflate needs the numeric level; other codecs are looked up by name.
      CodecFactory factory = codecName.equals(DEFLATE_CODEC)
        ? CodecFactory.deflateCodec(level)
        : CodecFactory.fromString(codecName);
      writer.setCodec(factory);
    }
View Full Code Here

    // Same pattern against the new mapreduce API: the codec name and deflate
    // level are read from the task's Configuration rather than a JobConf.
    if (FileOutputFormat.getCompressOutput(context)) {
      int level = context.getConfiguration()
        .getInt(DEFLATE_LEVEL_KEY, DEFAULT_DEFLATE_LEVEL);
      String codecName = context.getConfiguration()
        .get(org.apache.avro.mapred.AvroJob.OUTPUT_CODEC, DEFLATE_CODEC);
      CodecFactory factory =
        codecName.equals(DEFLATE_CODEC) ? CodecFactory.deflateCodec(level)
          : CodecFactory.fromString(codecName);
      writer.setCodec(factory);
    }
View Full Code Here

    }
 
    // Command-line tool: fall back to deflate when no --codec flag was given.
    String codecName = opts.hasArgument(codec)
      ? codec.value(opts)
      : DEFLATE_CODEC;
    CodecFactory codecFactory = codecName.equals(DEFLATE_CODEC)
      ? CodecFactory.deflateCodec(compressionLevel)
      : CodecFactory.fromString(codecName);

    // "-" conventionally selects stdin/stdout in these Util helpers —
    // NOTE(review): confirm against Util.fileOrStdin/fileOrStdout.
    BufferedInputStream inStream = Util.fileOrStdin(nargs.get(0), stdin);
    BufferedOutputStream outStream = Util.fileOrStdout(nargs.get(1), out);
View Full Code Here

    // Re-codec flow: read an existing container file, then write its records
    // back out with the codec chosen on the command line.
    DataFileStream<GenericRecord> reader = new DataFileStream<GenericRecord>(
        input, new GenericDatumReader<GenericRecord>());
    Schema schema = reader.getSchema();
    DataFileWriter<GenericRecord> writer = new DataFileWriter<GenericRecord>(
        new GenericDatumWriter<GenericRecord>());
    // Deflate is paired with an explicit level option; anything else is
    // resolved purely by codec name.
    CodecFactory codec = opts.valueOf(codecOpt).equals("deflate")
        ? CodecFactory.deflateCodec(Integer.parseInt(levelOpt.value(opts)))
        : CodecFactory.fromString(codecOpt.value(opts));
    writer.setCodec(codec);
    // Copy user metadata across, skipping Avro's reserved keys
    // (snippet is truncated here in the original listing).
    for (String key : reader.getMetaKeys()) {
      if (!DataFileWriter.isReservedMeta(key)) {
View Full Code Here

    // Variant using fully-qualified names (likely to avoid import clashes
    // between the mapred and mapreduce APIs in the enclosing file).
    if (org.apache.hadoop.mapred.FileOutputFormat.getCompressOutput(jc)) {
      int level = conf.getInt(org.apache.avro.mapred.AvroOutputFormat.DEFLATE_LEVEL_KEY,
          org.apache.avro.mapred.AvroOutputFormat.DEFAULT_DEFLATE_LEVEL);
      String codecName = conf.get(AvroJob.OUTPUT_CODEC,
          org.apache.avro.file.DataFileConstants.DEFLATE_CODEC);
      CodecFactory codec = codecName.equals(org.apache.avro.file.DataFileConstants.DEFLATE_CODEC)
          ? CodecFactory.deflateCodec(level)
          : CodecFactory.fromString(codecName);
      // WRITER is a field defined outside this snippet — TODO confirm scope.
      WRITER.setCodec(codec);
    }
View Full Code Here

      // Reflect-based variant: datum writer built from the record's class.
      new DataFileWriter<T>(new ReflectDatumWriter<T>());

    if (FileOutputFormat.getCompressOutput(job)) {
      int level = job.getInt(DEFLATE_LEVEL_KEY, DEFAULT_DEFLATE_LEVEL);
      String codecName = job.get(AvroJob.OUTPUT_CODEC, DEFLATE_CODEC);
      // Same deflate-vs-named-codec dispatch as the other output formats.
      CodecFactory factory = codecName.equals(DEFLATE_CODEC)
        ? CodecFactory.deflateCodec(level)
        : CodecFactory.fromString(codecName);
      writer.setCodec(factory);
    }
View Full Code Here

    // Duplicate of the earlier snippet: container-file writer over gdw.
    DataFileWriter<GenericRecord> dfw = new DataFileWriter<GenericRecord>(gdw);

    if (isCompressed) {
      // Deflate carries a level; other codecs are resolved by name.
      int level = jobConf.getInt(DEFLATE_LEVEL_KEY, DEFAULT_DEFLATE_LEVEL);
      String codecName = jobConf.get(OUTPUT_CODEC, DEFLATE_CODEC);
      CodecFactory factory = codecName.equals(DEFLATE_CODEC)
          ? CodecFactory.deflateCodec(level)
          : CodecFactory.fromString(codecName);
      dfw.setCodec(factory);
    }
View Full Code Here

    dataFileWriter = new DataFileWriter<Object>(writer);

    // Controls how often the container file emits a sync marker (bytes).
    dataFileWriter.setSyncInterval(syncIntervalBytes);

    try {
      CodecFactory codecFactory = CodecFactory.fromString(compressionCodec);
      dataFileWriter.setCodec(codecFactory);
    } catch (AvroRuntimeException e) {
      // Unknown codec name: deliberately degrade to uncompressed output
      // instead of failing the writer.
      logger.warn("Unable to instantiate avro codec with name (" +
          compressionCodec + "). Compression disabled. Exception follows.", e);
    }
View Full Code Here

        // Pig integration: datum writer driven by the Pig-derived Avro schema.
        DataFileWriter<Object> writer = new DataFileWriter<Object>(new PigAvroDatumWriter(schema));

        if (FileOutputFormat.getCompressOutput(context)) {
            int level = conf.getInt(DEFLATE_LEVEL_KEY, DEFAULT_DEFLATE_LEVEL);
            String codecName = conf.get(OUTPUT_CODEC, DEFLATE_CODEC);
            // Deflate takes a level; other codecs are looked up by name.
            CodecFactory factory = codecName.equals(DEFLATE_CODEC)
                ? CodecFactory.deflateCodec(level)
                : CodecFactory.fromString(codecName);
            writer.setCodec(factory);
        }
View Full Code Here

TOP

Related Classes of org.apache.avro.file.CodecFactory

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc. Contact coftware#gmail.com.