Class org.apache.hadoop.io.SequenceFile

Examples of org.apache.hadoop.io.SequenceFile.Metadata
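
Before the library snippets below, here is a minimal, self-contained sketch of the Metadata API itself: build a Metadata, attach key/value pairs, pass it to SequenceFile.createWriter, and read it back through Reader.getMetadata(). The path and entry names are made up; the createWriter overload used here (the FileSystem/Configuration/Path form with a trailing Metadata argument) exists in classic Hadoop releases but is deprecated in favor of the Writer.Option variants in newer ones.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.SequenceFile.CompressionType;
    import org.apache.hadoop.io.SequenceFile.Metadata;
    import org.apache.hadoop.io.Text;

    public class MetadataRoundTrip {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("/tmp/metadata-example.seq");  // hypothetical path

        // Build the metadata that will be stored in the file header.
        Metadata metadata = new Metadata();
        metadata.set(new Text("created.by"), new Text("MetadataRoundTrip"));
        metadata.set(new Text("schema.version"), new Text("1"));

        // Write an uncompressed file whose header carries the metadata.
        SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, path,
            Text.class, Text.class, CompressionType.NONE, null, null, metadata);
        writer.append(new Text("key"), new Text("value"));
        writer.close();

        // Re-open the file and read the metadata back from the header.
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
        Metadata stored = reader.getMetadata();
        System.out.println(stored.get(new Text("created.by")));  // prints MetadataRoundTrip
        reader.close();
      }
    }

The metadata is written once into the file header, so a reader can inspect it via getMetadata() without scanning any records.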


     * @param name
     *          the file name
     * @throws IOException
     */
    public Writer(FileSystem fs, Configuration conf, Path name,
        Progressable progress, CompressionCodec codec) throws IOException {
      // Delegate with an empty Metadata, forwarding the caller's Progressable.
      this(fs, conf, name, progress, new Metadata(), codec);
    }


        Metadata metadata, CompressionCodec codec) throws IOException {
      RECORD_INTERVAL = conf.getInt(RECORD_INTERVAL_CONF_STR, RECORD_INTERVAL);
      columnNumber = conf.getInt(COLUMN_NUMBER_CONF_STR, 0);

      // Record the column count in the file's metadata so readers can recover it
      // directly from the file header instead of the job configuration.
      if (metadata == null) {
        metadata = new Metadata();
      }
      metadata.set(new Text(COLUMN_NUMBER_METADATA_STR), new Text(""
          + columnNumber));

      // Size of the in-memory columns buffer, defaulting to 4 MB.
      columnsBufferSize = conf.getInt(COLUMNS_BUFFER_SIZE_CONF_STR,
          4 * 1024 * 1024);
 
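For symmetry, here is the reader-side counterpart of the snippet above: a short sketch of recovering the column count from the same metadata entry. The variable names are made up, and the Metadata instance is assumed to come from the matching reader's getMetadata() call.

      // Recover the column count the writer stored under COLUMN_NUMBER_METADATA_STR.
      Text storedColumns = metadata.get(new Text(COLUMN_NUMBER_METADATA_STR));
      int columnNumber = (storedColumns == null)
          ? 0  // entry missing: fall back to a default
          : Integer.parseInt(storedColumns.toString());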

              "Unknown codec: " + codecClassname, cnfe);
        }
        // Borrow a pooled decompressor for reading compressed keys.
        keyDecompressor = CodecPool.getDecompressor(codec);
      }

      metadata = new Metadata();
      if (version >= VERSION_WITH_METADATA) { // if version >= 6
        metadata.readFields(in);
      }

      if (version > 1) { // if version > 1
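Metadata implements Writable, so the readFields call above has a matching write. Below is a small sketch of round-tripping a Metadata instance through the in-memory DataOutputBuffer and DataInputBuffer classes from org.apache.hadoop.io; the entry names are made up.

      // Serialize a Metadata instance the same way a file header does.
      Metadata original = new Metadata();
      original.set(new Text("example.key"), new Text("example.value"));

      DataOutputBuffer outBuf = new DataOutputBuffer();
      original.write(outBuf);                     // Writable#write

      // Deserialize it again, mirroring metadata.readFields(in) above.
      DataInputBuffer inBuf = new DataInputBuffer();
      inBuf.reset(outBuf.getData(), outBuf.getLength());
      Metadata copy = new Metadata();
      copy.readFields(inBuf);

      System.out.println(copy.get(new Text("example.key")));  // prints example.value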

      return out.getPos();
    }

    /** Constructs an RCFile Writer with defaults: no compression and an empty Metadata. */
    public Writer(FileSystem fs, Configuration conf, Path name) throws IOException {
      this(fs, conf, name, null, new Metadata(), null);
    }
    }
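Here is a hypothetical call site that supplies its own Metadata instead of the empty one created by the convenience constructors above. It assumes Hive's org.apache.hadoop.hive.ql.io.RCFile, whose Writer exposes the six-argument (fs, conf, name, progress, metadata, codec) constructor that both constructors above delegate to; the path and entry names are made up.

      Configuration conf = new Configuration();
      FileSystem fs = FileSystem.get(conf);
      Path path = new Path("/tmp/example.rc");  // hypothetical path

      Metadata metadata = new Metadata();
      metadata.set(new Text("generated.by"), new Text("example-job"));

      // Null progress reporter, no compression codec, caller-supplied metadata.
      RCFile.Writer writer = new RCFile.Writer(fs, conf, path, null, metadata, null);
      // ... append rows, then writer.close();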


          // Honor the job's output-compression settings; GzipCodec is used
          // when compression is enabled but no codec class is configured.
          if (getCompressOutput(job)) {
            Class<? extends CompressionCodec> codecClass = getOutputCompressorClass(job, GzipCodec.class);
            codec = ReflectionUtils.newInstance(codecClass, conf);
          }

          Metadata metadata = null;

          String ext = conf.get(EXTENSION_OVERRIDE_CONF, DEFAULT_EXTENSION);
          Path file = getDefaultWorkFile(job, ext.equalsIgnoreCase("none") ? null : ext);

          LOG.info("writing to rcfile " + file.toString());
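The getCompressOutput and getOutputCompressorClass calls above read standard FileOutputFormat job settings. A driver would typically enable them as sketched below, using the mapreduce API that the getDefaultWorkFile call suggests; the job name is made up.

      // Driver-side sketch: enable output compression so the record writer above
      // instantiates a codec (GzipCodec unless another class is configured).
      Job job = Job.getInstance(new Configuration(), "rcfile-output");
      FileOutputFormat.setCompressOutput(job, true);
      FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);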

      public Options() {
        mBufferSizeBytes = DEFAULT;
        mReplicationFactor = DEFAULT;
        mBlockSizeBytes = DEFAULT;
        mCompressionType = CompressionType.NONE;
        mMetadata = new Metadata();
      }

        // Configure schemas and add Avro serialization to the configuration.
        Configuration confWithAvro = new Configuration(conf);
        AvroSerialization.addToConfiguration(confWithAvro);

        // Read the metadata header from the SequenceFile to get the writer schemas.
        Metadata metadata = AvroSequenceFile.getMetadata(
            getFileSystem(), getInputPath(), confWithAvro);

        // Set the key schema if present in the metadata.
        Text keySchemaText = metadata.get(METADATA_FIELD_KEY_SCHEMA);
        if (null != keySchemaText) {
          LOG.debug("Using key writer schema from SequenceFile metadata: "
              + keySchemaText.toString());
          AvroSerialization.setKeyWriterSchema(
              confWithAvro, Schema.parse(keySchemaText.toString()));
          if (null != mKeyReaderSchema) {
            AvroSerialization.setKeyReaderSchema(confWithAvro, mKeyReaderSchema);
          }
        }

        // Set the value schema if present in the metadata.
        Text valueSchemaText = metadata.get(METADATA_FIELD_VALUE_SCHEMA);
        if (null != valueSchemaText) {
          LOG.debug("Using value writer schema from SequenceFile metadata: "
              + valueSchemaText.toString());
          AvroSerialization.setValueWriterSchema(
              confWithAvro, Schema.parse(valueSchemaText.toString()));
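The writer side of this exchange simply stores the schema JSON under the same metadata keys before the file is created. The sketch below is not the library's actual writer code; it reuses the METADATA_FIELD_KEY_SCHEMA and METADATA_FIELD_VALUE_SCHEMA constants referenced above, and the schemas are made up.

      // Store writer schemas in the SequenceFile metadata so a reader can
      // configure AvroSerialization exactly as the code above does.
      Schema keySchema = Schema.create(Schema.Type.STRING);  // hypothetical schemas
      Schema valueSchema = Schema.create(Schema.Type.LONG);

      SequenceFile.Metadata metadata = new SequenceFile.Metadata();
      metadata.set(METADATA_FIELD_KEY_SCHEMA, new Text(keySchema.toString()));
      metadata.set(METADATA_FIELD_VALUE_SCHEMA, new Text(valueSchema.toString()));
      // ... pass 'metadata' to the SequenceFile writer being opened.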
