Examples of InputJobInfo


Examples of org.apache.hcatalog.mapreduce.InputJobInfo

        // "job" here is an old-API JobConf; HCatInputFormat.setInput() is
        // called on a new-API Job that wraps it, and the resulting
        // configuration is copied back below.
        job.setJobName("hbase-scan-column");
        job.setJarByClass(this.getClass());
        job.setMapperClass(MapReadProjectionHTable.class);
        job.setInputFormat(HBaseInputFormat.class);

        InputJobInfo inputJobInfo = InputJobInfo.create(
                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
        // Configure the projection schema
        job.set(HCatConstants.HCAT_KEY_OUTPUT_SCHEMA, HCatUtil.serialize(getProjectionSchema()));
        Job newJob = new Job(job);
        HCatInputFormat.setInput(newJob, inputJobInfo);
        String inputJobString = newJob.getConfiguration().get(HCatConstants.HCAT_KEY_JOB_INFO);
        InputJobInfo info = (InputJobInfo) HCatUtil.deserialize(inputJobString);
        // Copy the serialized InputJobInfo and the per-partition job
        // properties back into the original JobConf.
        job.set(HCatConstants.HCAT_KEY_JOB_INFO, inputJobString);
        for (PartInfo partinfo : info.getPartitions()) {
            for (Entry<String, String> entry : partinfo.getJobProperties().entrySet())
                job.set(entry.getKey(), entry.getValue());
        }
        // The storage handler maps the projection to HBase scan columns.
        assertEquals("testFamily:testQualifier1", job.get(TableInputFormat.SCAN_COLUMNS));
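Once the partition job properties have been copied in, the mapper sees rows as HCatRecord values. A minimal sketch of such a mapper follows; the class name and field index are illustrative, not the MapReadProjectionHTable used above:

import java.io.IOException;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hcatalog.data.HCatRecord;

public class ProjectionReadMapper
        extends Mapper<WritableComparable, HCatRecord, Text, NullWritable> {
    @Override
    protected void map(WritableComparable key, HCatRecord value, Context context)
            throws IOException, InterruptedException {
        // HCatRecord fields are positional; index 0 is the first projected column.
        context.write(new Text(String.valueOf(value.get(0))), NullWritable.get());
    }
}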

Examples of org.apache.hcatalog.mapreduce.InputJobInfo

        Job job = new Job(conf, "hbase-aborted-transaction");
        job.setJarByClass(this.getClass());
        job.setMapperClass(MapReadHTable.class);
        MapReadHTable.resetCounters();
        job.setInputFormatClass(HCatInputFormat.class);
        InputJobInfo inputJobInfo = InputJobInfo.create(
                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
        HCatInputFormat.setInput(job, inputJobInfo);
        job.setOutputFormatClass(TextOutputFormat.class);
        TextOutputFormat.setOutputPath(job, outputDir);
        job.setMapOutputKeyClass(BytesWritable.class);
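The snippet is truncated before the job is submitted. A hedged sketch of a typical tail for this kind of test; the value classes and the map-only setting are assumptions, not the original code:

        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(BytesWritable.class);
        job.setOutputValueClass(Text.class);
        job.setNumReduceTasks(0);                 // map-only read; an assumption
        assertTrue(job.waitForCompletion(true));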

Examples of org.apache.hcatalog.mapreduce.InputJobInfo

        Job job = new Job(conf, "hbase-running-aborted-transaction");
        job.setJarByClass(this.getClass());
        job.setMapperClass(MapReadHTableRunningAbort.class);
        job.setInputFormatClass(HCatInputFormat.class);
        InputJobInfo inputJobInfo = InputJobInfo.create(
                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
        HCatInputFormat.setInput(job, inputJobInfo);
        job.setOutputFormatClass(TextOutputFormat.class);
        TextOutputFormat.setOutputPath(job, outputDir);
        job.setMapOutputKeyClass(BytesWritable.class);

Examples of org.apache.hcatalog.mapreduce.InputJobInfo

        // Populate jobProperties with input table name, table columns, RM snapshot,
        // hbase-default.xml and hbase-site.xml
        Map<String, String> tableJobProperties = tableDesc.getJobProperties();
        String jobString = tableJobProperties.get(HCatConstants.HCAT_KEY_JOB_INFO);
        try {
            InputJobInfo inputJobInfo = (InputJobInfo) HCatUtil.deserialize(jobString);
            HCatTableInfo tableInfo = inputJobInfo.getTableInfo();
            String qualifiedTableName = HBaseHCatStorageHandler.getFullyQualifiedHBaseTableName(tableInfo);
            jobProperties.put(TableInputFormat.INPUT_TABLE, qualifiedTableName);

            Configuration jobConf = getJobConf();
            addHbaseResources(jobConf, jobProperties);
            JobConf copyOfConf = new JobConf(jobConf);
            HBaseConfiguration.addHbaseResources(copyOfConf);
            // Getting the HBase delegation token in getInputSplits() does not
            // work with Pig, so obtain it here instead.
            if (jobConf instanceof JobConf) { //Should be the case
                HBaseUtil.addHBaseDelegationToken(copyOfConf);
                ((JobConf)jobConf).getCredentials().addAll(copyOfConf.getCredentials());
            }

            String outputSchema = jobConf.get(HCatConstants.HCAT_KEY_OUTPUT_SCHEMA);
            jobProperties.put(TableInputFormat.SCAN_COLUMNS, getScanColumns(tableInfo, outputSchema));

            String serSnapshot = (String) inputJobInfo.getProperties().get(
                    HBaseConstants.PROPERTY_TABLE_SNAPSHOT_KEY);
            if (serSnapshot == null) {
                HCatTableSnapshot snapshot = HBaseRevisionManagerUtil.createSnapshot(copyOfConf,
                        qualifiedTableName, tableInfo);
                jobProperties.put(HBaseConstants.PROPERTY_TABLE_SNAPSHOT_KEY,
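The getScanColumns(tableInfo, outputSchema) call above maps the projected HCatalog columns to the space-separated family:qualifier list that TableInputFormat.SCAN_COLUMNS expects (compare the "testFamily:testQualifier1" assertion in the first snippet). A hypothetical helper illustrating just that encoding, not the actual implementation:

    // Hypothetical sketch of the SCAN_COLUMNS encoding: space-separated
    // "family:qualifier" pairs. Parameter names are illustrative.
    static String toScanColumns(java.util.List<String> families,
                                java.util.List<String> qualifiers) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < families.size(); i++) {
            if (sb.length() > 0) {
                sb.append(' ');
            }
            sb.append(families.get(i)).append(':').append(qualifiers.get(i));
        }
        return sb.toString();
    }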

Examples of org.apache.hcatalog.mapreduce.InputJobInfo

    @Override
    public RecordReader<ImmutableBytesWritable, Result> getRecordReader(
            InputSplit split, JobConf job, Reporter reporter)
            throws IOException {
        String jobString = job.get(HCatConstants.HCAT_KEY_JOB_INFO);
        InputJobInfo inputJobInfo = (InputJobInfo) HCatUtil.deserialize(jobString);

        String tableName = job.get(TableInputFormat.INPUT_TABLE);
        TableSplit tSplit = (TableSplit) split;
        HbaseSnapshotRecordReader recordReader = new HbaseSnapshotRecordReader(inputJobInfo, job);
        inputFormat.setConf(job);

Examples of org.apache.hcatalog.mapreduce.InputJobInfo

        if(inputJobString == null){
            throw new IOException(
                    "InputJobInfo information not found in JobContext. "
                            + "HCatInputFormat.setInput() not called?");
        }
        InputJobInfo inputInfo = (InputJobInfo) HCatUtil.deserialize(inputJobString);
        HCatTableSnapshot hcatSnapshot = HBaseRevisionManagerUtil
                .convertSnapshot(snpt, inputInfo.getTableInfo());

        return hcatSnapshot;
    }
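For context, the inputJobString tested above is read from the job configuration under HCatConstants.HCAT_KEY_JOB_INFO, which HCatInputFormat.setInput() populates, as the error message suggests. A one-line sketch; the jobContext variable is assumed from the truncated enclosing method:

        String inputJobString = jobContext.getConfiguration()
                .get(HCatConstants.HCAT_KEY_JOB_INFO);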

Examples of org.apache.hcatalog.mapreduce.InputJobInfo

    if (inputJobString == null) {
      throw new IOException(
        "InputJobInfo information not found in JobContext. "
          + "HCatInputFormat.setInput() not called?");
    }
    InputJobInfo inputInfo = (InputJobInfo) HCatUtil.deserialize(inputJobString);
    HCatTableSnapshot hcatSnapshot = HBaseRevisionManagerUtil
      .convertSnapshot(snpt, inputInfo.getTableInfo());

    return hcatSnapshot;
  }

Examples of org.apache.hcatalog.mapreduce.InputJobInfo

    // Populate jobProperties with input table name, table columns, RM snapshot,
    // hbase-default.xml and hbase-site.xml
    Map<String, String> tableJobProperties = tableDesc.getJobProperties();
    String jobString = tableJobProperties.get(HCatConstants.HCAT_KEY_JOB_INFO);
    try {
      InputJobInfo inputJobInfo = (InputJobInfo) HCatUtil.deserialize(jobString);
      HCatTableInfo tableInfo = inputJobInfo.getTableInfo();
      String qualifiedTableName = HBaseHCatStorageHandler.getFullyQualifiedHBaseTableName(tableInfo);
      jobProperties.put(TableInputFormat.INPUT_TABLE, qualifiedTableName);

      Configuration jobConf = getJobConf();
      addResources(jobConf, jobProperties);
      JobConf copyOfConf = new JobConf(jobConf);
      HBaseConfiguration.addHbaseResources(copyOfConf);
      // Getting the HBase delegation token in getInputSplits() does not
      // work with Pig, so obtain it here instead.
      if (jobConf instanceof JobConf) { //Should be the case
        HBaseUtil.addHBaseDelegationToken(copyOfConf);
        ((JobConf) jobConf).getCredentials().addAll(copyOfConf.getCredentials());
      }

      String outputSchema = jobConf.get(HCatConstants.HCAT_KEY_OUTPUT_SCHEMA);
      jobProperties.put(TableInputFormat.SCAN_COLUMNS, getScanColumns(tableInfo, outputSchema));

      String serSnapshot = (String) inputJobInfo.getProperties().get(
        HBaseConstants.PROPERTY_TABLE_SNAPSHOT_KEY);
      if (serSnapshot == null) {
        HCatTableSnapshot snapshot =
          HBaseRevisionManagerUtil.createSnapshot(
            RevisionManagerConfiguration.create(copyOfConf),

Examples of org.apache.hcatalog.mapreduce.InputJobInfo

  @Override
  public RecordReader<ImmutableBytesWritable, ResultWritable> getRecordReader(
    InputSplit split, JobConf job, Reporter reporter)
    throws IOException {
    String jobString = job.get(HCatConstants.HCAT_KEY_JOB_INFO);
    InputJobInfo inputJobInfo = (InputJobInfo) HCatUtil.deserialize(jobString);

    String tableName = job.get(TableInputFormat.INPUT_TABLE);
    TableSplit tSplit = (TableSplit) split;
    HbaseSnapshotRecordReader recordReader = new HbaseSnapshotRecordReader(inputJobInfo, job);
    inputFormat.setConf(job);

Examples of org.apache.hcatalog.mapreduce.InputJobInfo

  @Override
  public ResourceStatistics getStatistics(String location, Job job) throws IOException {
    try {
      ResourceStatistics stats = new ResourceStatistics();
      InputJobInfo inputJobInfo = (InputJobInfo) HCatUtil.deserialize(
        job.getConfiguration().get(HCatConstants.HCAT_KEY_JOB_INFO));
      stats.setmBytes(getSizeInBytes(inputJobInfo) / 1024 / 1024);
      return stats;
    } catch (Exception e) {
      throw new IOException(e);
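The getSizeInBytes(inputJobInfo) helper is truncated out of the snippet. A hypothetical stand-in that estimates the input size by summing the bytes under each partition's location; the real HCatLoader helper may compute this differently:

    // Hypothetical stand-in for getSizeInBytes; the extra Configuration
    // parameter is an assumption of this sketch.
    static long getSizeInBytes(InputJobInfo inputJobInfo, Configuration conf)
            throws IOException {
        long total = 0;
        for (PartInfo partInfo : inputJobInfo.getPartitions()) {
            Path location = new Path(partInfo.getLocation());
            FileSystem fs = location.getFileSystem(conf);
            // Sum every byte under the partition directory.
            total += fs.getContentSummary(location).getLength();
        }
        return total;
    }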