Class org.apache.hadoop.io.SequenceFile

Examples of org.apache.hadoop.io.SequenceFile.Metadata
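
SequenceFile.Metadata is an ordered map of Text keys to Text values that a writer serializes into the file header and a reader can retrieve without scanning any records. The excerpts below show how different projects create, populate, read, and propagate it. As a minimal, hedged starting point (the path, key name, and record types are illustrative and not taken from the excerpts), attaching metadata on the write side looks roughly like this:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.IntWritable;
  import org.apache.hadoop.io.SequenceFile;
  import org.apache.hadoop.io.SequenceFile.Metadata;
  import org.apache.hadoop.io.Text;
  import org.apache.hadoop.io.compress.DefaultCodec;

  public class MetadataWriteExample {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      FileSystem fs = FileSystem.get(conf);
      Path path = new Path("/tmp/metadata-example.seq");   // illustrative path

      // Metadata entries are Text/Text pairs; they are written into the file header.
      Metadata metadata = new Metadata();
      metadata.set(new Text("created-by"), new Text("metadata-example"));

      SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, path,
          Text.class, IntWritable.class, SequenceFile.CompressionType.BLOCK,
          new DefaultCodec(), null /* progress */, metadata);
      writer.append(new Text("answer"), new IntWritable(42));
      writer.close();
    }
  }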


              "Unknown codec: " + codecClassname, cnfe);
        }
        keyDecompressor = CodecPool.getDecompressor(codec);
      }

      metadata = new Metadata();
      metadata.readFields(in);

      in.readFully(sync); // read sync bytes
      headerEnd = in.getPos();
    }
View Full Code Here
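
The fragment above is the reader-side header parsing inside SequenceFile itself: once the codec is resolved and a decompressor is borrowed from the CodecPool, the Metadata block is deserialized with readFields, the sync marker is read, and the position after the header is remembered as headerEnd. Application code does not need to do any of this by hand; a hedged sketch of the usual way to reach the same header entries (the path and key are illustrative, and fs/conf are assumed to be in scope):

  SequenceFile.Reader reader = new SequenceFile.Reader(fs, new Path("/tmp/metadata-example.seq"), conf);
  try {
    SequenceFile.Metadata metadata = reader.getMetadata();  // parsed from the header as shown above
    Text createdBy = metadata.get(new Text("created-by"));  // null when the key is absent
    System.out.println("created-by = " + createdBy);
  } finally {
    reader.close();
  }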


      return out.getPos();
    }

    /** Constructs a RCFile Writer. */
    public Writer(FileSystem fs, Configuration conf, Path name) throws IOException {
      this(fs, conf, name, null, new Metadata(), null);
    }
View Full Code Here

     *          the file name
     * @throws IOException
     */
    public Writer(FileSystem fs, Configuration conf, Path name,
        Progressable progress, CompressionCodec codec) throws IOException {
      this(fs, conf, name, progress, new Metadata(), codec);
    }
View Full Code Here
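
The two constructors above (from Hive's RCFile.Writer) are conveniences: when the caller supplies no Metadata, they pass a fresh new Metadata() down the constructor chain instead of null. A hedged usage sketch of exactly these two variants (paths are illustrative, and passing null for the Progressable is assumed to be acceptable):

  // Simplest form: default (empty) metadata, no compression codec.
  RCFile.Writer plain = new RCFile.Writer(fs, conf, new Path("/tmp/plain.rc"));

  // With a codec; the metadata argument again defaults to new Metadata().
  RCFile.Writer compressed =
      new RCFile.Writer(fs, conf, new Path("/tmp/compressed.rc"), null, new DefaultCodec());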

        Metadata metadata, CompressionCodec codec) throws IOException {
      RECORD_INTERVAL = conf.getInt(RECORD_INTERVAL_CONF_STR, RECORD_INTERVAL);
      columnNumber = conf.getInt(COLUMN_NUMBER_CONF_STR, 0);

      if (metadata == null) {
        metadata = new Metadata();
      }
      metadata.set(new Text(COLUMN_NUMBER_METADATA_STR), new Text(""
          + columnNumber));

      columnsBufferSize = conf.getInt(COLUMNS_BUFFER_SIZE_CONF_STR,
View Full Code Here
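
This is the full constructor that the shortcuts above delegate to, and it shows what the Metadata argument is actually used for: a null Metadata is replaced with an empty one, and the configured column count is recorded under COLUMN_NUMBER_METADATA_STR so that readers can recover the table width from the file header alone. A hedged sketch of the matching read side, assuming the constant shown above is accessible and fileMetadata is the Metadata recovered from an RCFile header:

  Text columnCountText = fileMetadata.get(new Text(RCFile.COLUMN_NUMBER_METADATA_STR));
  int columnNumber = (columnCountText == null) ? 0 : Integer.parseInt(columnCountText.toString());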

        Writer writer = null;
        Reader reader = null;
        try {
          short replication = (short) acuConf.getCount(Property.LOGGER_RECOVERY_FILE_REPLICATION);
          writer = SequenceFile.createWriter(fs, fs.getConf(), dest, LogFileKey.class, LogFileValue.class, fs.getConf().getInt("io.file.buffer.size", 4096),
              replication, fs.getDefaultBlockSize(), SequenceFile.CompressionType.BLOCK, new DefaultCodec(), null, new Metadata());
          FileSystem local = TraceFileSystem.wrap(FileSystem.getLocal(fs.getConf()).getRaw());
          reader = new SequenceFile.Reader(local, new Path(findLocalFilename(localLog)), fs.getConf());
          while (reader.next(key, value)) {
            writer.append(key, value);
          }
View Full Code Here
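
The copy loop above (apparently a write-ahead-log recovery path) creates the destination SequenceFile with the long createWriter overload that takes buffer size, replication, block size, compression settings, and metadata together, passing an explicitly empty new Metadata(). If the intent were to preserve the source file's header entries rather than drop them, the reader's metadata could be passed through instead; a hedged sketch, with fs, conf, srcPath, dstPath, and replication assumed to be in scope:

  SequenceFile.Reader src = new SequenceFile.Reader(fs, srcPath, conf);
  SequenceFile.Writer dst = SequenceFile.createWriter(fs, conf, dstPath,
      LogFileKey.class, LogFileValue.class,
      conf.getInt("io.file.buffer.size", 4096), replication, fs.getDefaultBlockSize(),
      SequenceFile.CompressionType.BLOCK, new DefaultCodec(),
      null /* progress */, src.getMetadata());   // carry the source header entries across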

      public Options() {
        mBufferSizeBytes = DEFAULT;
        mReplicationFactor = DEFAULT;
        mBlockSizeBytes = DEFAULT;
        mCompressionType = CompressionType.NONE;
        mMetadata = new Metadata();
      }
View Full Code Here
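
This Options holder defaults its metadata field to an empty new Metadata() so downstream code never has to null-check it. Newer Hadoop releases express the same idea with writer options rather than positional arguments; a hedged sketch, assuming a Hadoop version that provides the option-based createWriter and SequenceFile.Writer.metadata(...):

  Metadata metadata = new Metadata();
  metadata.set(new Text("created-by"), new Text("options-example"));

  SequenceFile.Writer writer = SequenceFile.createWriter(conf,
      SequenceFile.Writer.file(new Path("/tmp/options-example.seq")),
      SequenceFile.Writer.keyClass(Text.class),
      SequenceFile.Writer.valueClass(IntWritable.class),
      SequenceFile.Writer.metadata(metadata));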

              "Unknown codec: " + codecClassname, cnfe);
        }
        keyDecompressor = CodecPool.getDecompressor(codec);
      }

      metadata = new Metadata();
      if (version >= VERSION_WITH_METADATA) { // if version >= 6
        metadata.readFields(in);
      }

      if (version > 1) { // if version > 1
View Full Code Here
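
This older reader path makes the compatibility rule explicit: only files written at VERSION_WITH_METADATA (format version 6) or later carry a metadata block, and readers of earlier files fall back to an empty Metadata rather than failing. Callers therefore always see a non-null, possibly empty map; a hedged check:

  // Empty for files written before the metadata-capable format version.
  boolean hasHeaderEntries = !reader.getMetadata().getMetadata().isEmpty();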

        // Configure schemas and add Avro serialization to the configuration.
        Configuration confWithAvro = new Configuration(conf);
        AvroSerialization.addToConfiguration(confWithAvro);

        // Read the metadata header from the SequenceFile to get the writer schemas.
        Metadata metadata = AvroSequenceFile.getMetadata(
            getFileSystem(), getInputPath(), confWithAvro);

        // Set the key schema if present in the metadata.
        Text keySchemaText = metadata.get(METADATA_FIELD_KEY_SCHEMA);
        if (null != keySchemaText) {
          LOG.debug("Using key writer schema from SequenceFile metadata: "
              + keySchemaText.toString());
          AvroSerialization.setKeyWriterSchema(
              confWithAvro, Schema.parse(keySchemaText.toString()));
          if (null != mKeyReaderSchema) {
            AvroSerialization.setKeyReaderSchema(confWithAvro, mKeyReaderSchema);
          }
        }

        // Set the value schema if present in the metadata.
        Text valueSchemaText = metadata.get(METADATA_FIELD_VALUE_SCHEMA);
        if (null != valueSchemaText) {
          LOG.debug("Using value writer schema from SequenceFile metadata: "
              + valueSchemaText.toString());
          AvroSerialization.setValueWriterSchema(
              confWithAvro, Schema.parse(valueSchemaText.toString()));
View Full Code Here
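
Here the header doubles as a small schema registry: AvroSequenceFile stores the key and value writer schemas as metadata entries, and the job setup above pulls them back out to configure Avro serialization before reading. Because Metadata is just an ordered map, its entries can also be inspected generically; a hedged sketch (java.util.Map is assumed to be imported, and metadata is the instance obtained above):

  // getMetadata() exposes the underlying TreeMap<Text, Text> of header entries.
  for (Map.Entry<Text, Text> entry : metadata.getMetadata().entrySet()) {
    System.out.println(entry.getKey() + " = " + entry.getValue());
  }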

              "Unknown codec: " + codecClassname, cnfe);
        }
        keyDecompressor = CodecPool.getDecompressor(codec);
      }

      metadata = new Metadata();
      metadata.readFields(sin);

      sin.readFully(sync); // read sync bytes
      headerEnd = sin.getPos();
    }
View Full Code Here

    if (values.length % 2 != 0) {
      throw new IllegalArgumentException("Must have a matched set of " +
                                         "key-value pairs. " + values.length+
                                         " strings supplied.");
    }
    Metadata result = new Metadata();
    for(int i=0; i < values.length; i += 2) {
      result.set(values[i], values[i+1]);
    }
    return result;
  }
View Full Code Here
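
The helper above builds a Metadata instance from a flat, even-length array by treating consecutive elements as key/value pairs, rejecting odd-length input up front. Written inline, the equivalent is simply repeated set() calls; a hedged sketch with illustrative keys:

  Metadata metadata = new Metadata();
  metadata.set(new Text("format.version"), new Text("2"));
  metadata.set(new Text("owner"), new Text("etl-pipeline"));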
