Package: org.apache.avro.file

Examples of org.apache.avro.file.CodecFactory


   *   <li>Next, use mapred.output.compression.codec if populated</li>
   *   <li>If neither is set, default to the deflate codec</li>
   * </ul> 
   */
  static CodecFactory getCodecFactory(JobConf job) {
    CodecFactory factory = null;
   
    if (FileOutputFormat.getCompressOutput(job)) {
      int level = job.getInt(DEFLATE_LEVEL_KEY, DEFAULT_DEFLATE_LEVEL);
      String codecName = job.get(AvroJob.OUTPUT_CODEC);
     
View Full Code Here


  *   <li>{@code org.apache.hadoop.io.compress.GzipCodec} will map to {@code deflate}</li>
  * </ul>
  */
  public static CodecFactory fromHadoopString(String hadoopCodecClass) {

    CodecFactory o = null;
    try {
      String avroCodec = HADOOP_AVRO_NAME_MAP.get(hadoopCodecClass);
      if (avroCodec != null) {
        o = CodecFactory.fromString(avroCodec);
      }
View Full Code Here

        input, new GenericDatumReader<GenericRecord>());
    Schema schema = reader.getSchema();
    DataFileWriter<GenericRecord> writer = new DataFileWriter<GenericRecord>(
        new GenericDatumWriter<GenericRecord>());
    // unlike the other Avro tools, we default to a null codec, not deflate
    CodecFactory codec = Util.codecFactory(opts, codecOpt, levelOpt, DataFileConstants.NULL_CODEC);
    writer.setCodec(codec);
    for (String key : reader.getMetaKeys()) {
      if (!DataFileWriter.isReservedMeta(key)) {
        writer.setMeta(key, reader.getMeta(key));
      }
View Full Code Here

          " Hadoop URI's, or '-' for stdin/stdout");
      p.printHelpOn(err);
      return 1;
    }
    CodecFactory codecFactory = Util.codecFactory(opts, codec, level);
 
    BufferedInputStream inStream = Util.fileOrStdin(nargs.get(0), stdin);
    BufferedOutputStream outStream = Util.fileOrStdout(nargs.get(1), out);
   
    DataFileWriter<ByteBuffer> writer =
View Full Code Here

    }
    writer = new DataFileWriter<GenericRecord>(
        new GenericDatumWriter<GenericRecord>());
   
    String codecName = reader.getMetaString(DataFileConstants.CODEC);
    CodecFactory codec = (codecName == null)
        ? CodecFactory.fromString(DataFileConstants.NULL_CODEC)
        : CodecFactory.fromString(codecName);
    writer.setCodec(codec);
    for (String key : reader.getMetaKeys()) {
      if (!DataFileWriter.isReservedMeta(key)) {
View Full Code Here

    }
    writer = new DataFileWriter<GenericRecord>(
        new GenericDatumWriter<GenericRecord>());
   
    String codecName = reader.getMetaString(DataFileConstants.CODEC);
    CodecFactory codec = (codecName == null)
        ? CodecFactory.fromString(DataFileConstants.NULL_CODEC)
        : CodecFactory.fromString(codecName);
    writer.setCodec(codec);
    for (String key : reader.getMetaKeys()) {
      if (!DataFileWriter.isReservedMeta(key)) {
View Full Code Here

          " Hadoop URI's, or '-' for stdin/stdout");
      p.printHelpOn(err);
      return 1;
    }
    CodecFactory codecFactory = Util.codecFactory(opts, codec, level);
 
    BufferedInputStream inStream = Util.fileOrStdin(nargs.get(0), stdin);
    BufferedOutputStream outStream = Util.fileOrStdout(nargs.get(1), out);
   
    DataFileWriter<ByteBuffer> writer =
View Full Code Here

        input, new GenericDatumReader<GenericRecord>());
    Schema schema = reader.getSchema();
    DataFileWriter<GenericRecord> writer = new DataFileWriter<GenericRecord>(
        new GenericDatumWriter<GenericRecord>());
    // unlike the other Avro tools, we default to a null codec, not deflate
    CodecFactory codec = Util.codecFactory(opts, codecOpt, levelOpt, DataFileConstants.NULL_CODEC);
    writer.setCodec(codec);
    for (String key : reader.getMetaKeys()) {
      if (!DataFileWriter.isReservedMeta(key)) {
        writer.setMeta(key, reader.getMeta(key));
      }
View Full Code Here

    DataFileWriter<GenericRecord> dfw = new DataFileWriter<GenericRecord>(gdw);
   
    if (isCompressed) {
      int level = jobConf.getInt(DEFLATE_LEVEL_KEY, DEFAULT_DEFLATE_LEVEL);
      String codecName = jobConf.get(OUTPUT_CODEC, DEFLATE_CODEC);
      CodecFactory factory = codecName.equals(DEFLATE_CODEC)
          ? CodecFactory.deflateCodec(level)
          : CodecFactory.fromString(codecName);
      dfw.setCodec(factory);
    }
View Full Code Here

    if (org.apache.hadoop.mapred.FileOutputFormat.getCompressOutput(jc)) {
      int level = conf.getInt(org.apache.avro.mapred.AvroOutputFormat.DEFLATE_LEVEL_KEY,
          org.apache.avro.mapred.AvroOutputFormat.DEFAULT_DEFLATE_LEVEL);
      String codecName = conf.get(AvroJob.OUTPUT_CODEC,
          org.apache.avro.file.DataFileConstants.DEFLATE_CODEC);
      CodecFactory codec = codecName.equals(org.apache.avro.file.DataFileConstants.DEFLATE_CODEC)
          ? CodecFactory.deflateCodec(level)
          : CodecFactory.fromString(codecName);
      WRITER.setCodec(codec);
    }
View Full Code Here

TOP

Related Classes of org.apache.avro.file.CodecFactory

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Oracle Corporation (originally Sun Microsystems, Inc). Contact coftware#gmail.com.