Package org.apache.hcatalog.mapreduce

Examples of org.apache.hcatalog.mapreduce.OutputJobInfo
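
OutputJobInfo describes the table (and optional partition) that an HCatalog job writes to. It is created on the client, serialized into the job configuration under HCatConstants.HCAT_KEY_OUTPUT_INFO, and deserialized again wherever the write path needs the output schema, location, or storage-handler state, which is exactly what every snippet below does. For orientation, here is a minimal driver-side sketch of that setup, assuming the HCatalog 0.4-style API; the database name, table name, and partition spec are placeholders:

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hcatalog.mapreduce.HCatOutputFormat;
    import org.apache.hcatalog.mapreduce.OutputJobInfo;

    public class WriteJobSetup {
        public static Job createWriteJob(Configuration conf) throws Exception {
            Job job = new Job(conf, "hcat-write-example");

            // Placeholder database/table/partition spec, for illustration only.
            Map<String, String> partitionValues = new HashMap<String, String>();
            partitionValues.put("ds", "20130101");
            OutputJobInfo jobInfo = OutputJobInfo.create("default", "mytable", partitionValues);

            // setOutput contacts the metastore and serializes the populated
            // OutputJobInfo into the configuration under
            // HCatConstants.HCAT_KEY_OUTPUT_INFO, where the code below finds it.
            HCatOutputFormat.setOutput(job, jobInfo);

            // Write with the table's own schema.
            HCatOutputFormat.setSchema(job, HCatOutputFormat.getTableSchema(job));

            job.setOutputFormatClass(HCatOutputFormat.class);
            return job;
        }
    }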


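The first example is a test mapper that pulls the output schema from the deserialized OutputJobInfo and uses it to turn comma-separated input lines (an integer key followed by column:value pairs) into HCatRecords:
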
    public static class MapHCatWrite extends Mapper<LongWritable, Text, BytesWritable, HCatRecord> {

        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            OutputJobInfo jobInfo = (OutputJobInfo)HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
            HCatRecord record = new DefaultHCatRecord(3);
            HCatSchema schema = jobInfo.getOutputSchema();
            String vals[] = value.toString().split(",");
            record.setInteger("key",schema,Integer.parseInt(vals[0]));
            for(int i=1;i<vals.length;i++) {
                String pair[] = vals[i].split(":");
                record.set(pair[0],schema,pair[1]);
            }
            // (completing the truncated snippet) emit the record; HCatOutputFormat
            // ignores the key, so an empty BytesWritable will do
            context.write(new BytesWritable(), record);
        }
    }


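MapWriteAbortTransaction is the failure-injection variant of the same mapper: it throws deliberately on one key so that the abort path of the surrounding write transaction can be exercised in tests:
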
    static class MapWriteAbortTransaction extends Mapper<LongWritable, Text, BytesWritable, HCatRecord> {

        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            OutputJobInfo jobInfo = (OutputJobInfo)HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
            HCatRecord record = new DefaultHCatRecord(3);
            HCatSchema schema = jobInfo.getOutputSchema();
            String vals[] = value.toString().split(",");
            record.setInteger("key", schema, Integer.parseInt(vals[0]));
            if (vals[0].equals("3")) {
                throw new IOException("Failing map to test abort");
            }
            for (int i = 1; i < vals.length; i++) {
                String pair[] = vals[i].split(":");
                record.set(pair[0], schema, pair[1]);
            }
            // (completing the truncated snippet) non-failing rows are emitted as usual
            context.write(new BytesWritable(), record);
        }
    }

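OutputJobInfo is also used as a carrier for storage-handler state. This fragment, apparently from the HBase storage handler's output-configuration hook, stashes a revision-manager write transaction in the OutputJobInfo properties and, in bulk mode, an intermediate output location as well:
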
        // Populate the revision manager (RM) write transaction in OutputJobInfo;
        // in bulk mode, also record the intermediate output location.
        Map<String, String> tableJobProperties = tableDesc.getJobProperties();
        String jobString = tableJobProperties.get(HCatConstants.HCAT_KEY_OUTPUT_INFO);
        try {
            OutputJobInfo outputJobInfo = (OutputJobInfo) HCatUtil.deserialize(jobString);
            HCatTableInfo tableInfo = outputJobInfo.getTableInfo();
            String qualifiedTableName = HBaseHCatStorageHandler.getFullyQualifiedHBaseTableName(tableInfo);
            jobProperties.put(HBaseConstants.PROPERTY_OUTPUT_TABLE_NAME_KEY, qualifiedTableName);
            jobProperties.put(TableOutputFormat.OUTPUT_TABLE, qualifiedTableName);

            Configuration jobConf = getJobConf();
            addHbaseResources(jobConf, jobProperties);

            Configuration copyOfConf = new Configuration(jobConf);
            HBaseConfiguration.addHbaseResources(copyOfConf);

            String txnString = outputJobInfo.getProperties().getProperty(
                    HBaseConstants.PROPERTY_WRITE_TXN_KEY);
            Transaction txn = null;
            if (txnString == null) {
                txn = HBaseRevisionManagerUtil.beginWriteTransaction(qualifiedTableName, tableInfo, copyOfConf);
                String serializedTxn = HCatUtil.serialize(txn);
                outputJobInfo.getProperties().setProperty(HBaseConstants.PROPERTY_WRITE_TXN_KEY,
                        serializedTxn);
            } else {
                txn = (Transaction) HCatUtil.deserialize(txnString);
            }
            if (isBulkMode(outputJobInfo)) {
                String tableLocation = tableInfo.getTableLocation();
                String location = new Path(tableLocation, "REVISION_" + txn.getRevisionNumber())
                        .toString();
                outputJobInfo.getProperties().setProperty(PROPERTY_INT_OUTPUT_LOCATION, location);
                // We are writing out an intermediate sequenceFile hence
                // location is not passed in OutputJobInfo.getLocation()
                // TODO replace this with a mapreduce constant when available
                jobProperties.put("mapred.output.dir", location);
                jobProperties.put("mapred.output.committer.class", HBaseBulkOutputCommitter.class.getName());
            }
            // (completion sketch) re-serialize the updated OutputJobInfo so that
            // tasks see the transaction and location added above
            jobProperties.put(HCatConstants.HCAT_KEY_OUTPUT_INFO,
                    HCatUtil.serialize(outputJobInfo));
        } catch (IOException e) {
            // the configuration hook itself does not throw IOException, so wrap it
            throw new IllegalStateException("Error while configuring job properties", e);
        }

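On the Pig side, HCatStorer caches the serialized OutputJobInfo in its UDF properties, so front-end and back-end invocations of setStoreLocation reuse a single instance rather than rebuilding it:
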
  public void setStoreLocation(String location, Job job) throws IOException {
    job.getConfiguration().set(INNER_SIGNATURE, INNER_SIGNATURE_PREFIX + "_" + sign);
    Properties p = UDFContext.getUDFContext().getUDFProperties(this.getClass(), new String[]{sign});

    String[] userStr = location.split("\\.");
    OutputJobInfo outputJobInfo;

    String outInfoString = p.getProperty(HCatConstants.HCAT_KEY_OUTPUT_INFO);
    if (outInfoString != null) {
      outputJobInfo = (OutputJobInfo) HCatUtil.deserialize(outInfoString);
    } else {
      // (completion sketch) first call: build a fresh OutputJobInfo from the
      // "db.table" location string; the partition spec, parsed elsewhere in
      // the storer, is omitted here (null)
      if (userStr.length == 2) {
        outputJobInfo = OutputJobInfo.create(userStr[0], userStr[1], null);
      } else {
        outputJobInfo = OutputJobInfo.create(null, userStr[0], null);
      }
    }
    // ... remainder of the method (schema and metastore setup) not shown
  }

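A StoreFunc-backed OutputFormat recovers the Pig ResourceSchema and output location from OutputJobInfo in each task-side entry point, then delegates to the wrapped StoreFunc:
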
    @Override
    public OutputCommitter getOutputCommitter(TaskAttemptContext ctx)
            throws IOException, InterruptedException {
        String serializedJobInfo = ctx.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO);
        OutputJobInfo outputJobInfo = (OutputJobInfo)HCatUtil.deserialize(serializedJobInfo);
        ResourceSchema rs = PigHCatUtil.getResourceSchema(outputJobInfo.getOutputSchema());
        String location = outputJobInfo.getLocation();
        OutputFormat<BytesWritable,Tuple> outputFormat =  storeFunc.getOutputFormat();
        return new StoreFuncBasedOutputCommitter(storeFunc, outputFormat.getOutputCommitter(ctx), location, rs);
    }

    @Override
    public RecordWriter<BytesWritable, Tuple> getRecordWriter(
            TaskAttemptContext ctx) throws IOException, InterruptedException {
        RecordWriter<BytesWritable,Tuple> writer = storeFunc.getOutputFormat().getRecordWriter(ctx);
        String serializedJobInfo = ctx.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO);
        OutputJobInfo outputJobInfo = (OutputJobInfo)HCatUtil.deserialize(serializedJobInfo);
        ResourceSchema rs = PigHCatUtil.getResourceSchema(outputJobInfo.getOutputSchema());
        String location = outputJobInfo.getLocation();
        return new StoreFuncBasedRecordWriter(writer, storeFunc, location, rs);
    }
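getOutputCommitter and getRecordWriter repeat the same deserialize-and-unpack steps: each task-side entry point rehydrates its own copy of OutputJobInfo from the configuration rather than sharing mutable state across calls.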


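Inside the HBase storage handler's OutputFormat wrapper, a flag carried in OutputJobInfo selects between the bulk and direct write paths:
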
    private OutputFormat<WritableComparable<?>, Put> getOutputFormat(JobConf job)
            throws IOException {
        String outputInfo = job.get(HCatConstants.HCAT_KEY_OUTPUT_INFO);
        OutputJobInfo outputJobInfo = (OutputJobInfo) HCatUtil.deserialize(outputInfo);
        OutputFormat<WritableComparable<?>, Put> outputFormat = null;
        if (HBaseHCatStorageHandler.isBulkMode(outputJobInfo)) {
            outputFormat = new HBaseBulkOutputFormat();
        } else {
            outputFormat = new HBaseDirectOutputFormat();
        }
        return outputFormat;
    }


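Finally, the revision-manager write transaction itself round-trips through the OutputJobInfo properties; the getter and setter below are mirror images of each other:
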
    static Transaction getWriteTransaction(Configuration conf) throws IOException {
        OutputJobInfo outputJobInfo = (OutputJobInfo)HCatUtil.deserialize(conf.get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
        return (Transaction) HCatUtil.deserialize(outputJobInfo.getProperties()
                                                               .getProperty(HBaseConstants.PROPERTY_WRITE_TXN_KEY));
    }


    static void setWriteTransaction(Configuration conf, Transaction txn) throws IOException {
        OutputJobInfo outputJobInfo = (OutputJobInfo)HCatUtil.deserialize(conf.get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
        outputJobInfo.getProperties().setProperty(HBaseConstants.PROPERTY_WRITE_TXN_KEY, HCatUtil.serialize(txn));
        conf.set(HCatConstants.HCAT_KEY_OUTPUT_INFO, HCatUtil.serialize(outputJobInfo));
    }
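
Note that setWriteTransaction must re-serialize the whole OutputJobInfo back into the configuration: mutating the properties of a deserialized copy alone would be lost, because getWriteTransaction deserializes a fresh instance each time.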

