Package org.apache.pig.backend.hadoop.datastorage

Examples of org.apache.pig.backend.hadoop.datastorage.HDataStorage.asContainer()
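
All of the excerpts below follow the same pattern: an HDataStorage is built from the job configuration, asContainer("/user/<userid>") resolves the user's home directory on the DFS, and the result is passed to setActiveContainer() so that relative paths resolve against that working directory. Below is a minimal, self-contained sketch of that pattern, assuming only the Pig and Hadoop classes already shown in the excerpts; the class name AsContainerSketch and the helper openUserStorage are hypothetical and not part of the Pig codebase.

import java.io.IOException;

import org.apache.hadoop.mapred.JobConf;
import org.apache.pig.backend.datastorage.ContainerDescriptor;
import org.apache.pig.backend.datastorage.DataStorage;
import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
import org.apache.pig.backend.hadoop.datastorage.HDataStorage;

public class AsContainerSketch {

    // Hypothetical helper: builds an HDFS-backed DataStorage from a JobConf
    // and makes /user/<userid> its active container (working directory).
    public static DataStorage openUserStorage(JobConf job) throws IOException {
        DataStorage store = new HDataStorage(ConfigurationUtil.toProperties(job));

        // asContainer() turns the path string into a container descriptor;
        // setActiveContainer() makes it the base for resolving relative paths.
        ContainerDescriptor home = store.asContainer("/user/" + job.getUser());
        store.setActiveContainer(home);
        return store;
    }
}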


    }

    public RecordReader<Text, Tuple> makeReader(JobConf job) throws IOException {
        lastConf = job;
        DataStorage store = new HDataStorage(ConfigurationUtil.toProperties(job));
        // set the working directory to the user's home directory, /user/<userid>
        store.setActiveContainer(store.asContainer("/user/" + job.getUser()));
        wrapped.init(store);

        // Mimic org.apache.hadoop.mapred.FileSplit if feasible...
        String[] locations = wrapped.getLocations();
        if (locations.length > 0) {


        lastConf = job;       
        DataStorage store = new HDataStorage(ConfigurationUtil.toProperties(job));
        // if the execution is against Mapred DFS, set
        // working dir to /user/<userid>
        if(execType == ExecType.MAPREDUCE)
            store.setActiveContainer(store.asContainer("/user/" + job.getUser()));
        PigContext.setPackageImportList((ArrayList<String>)ObjectSerializer.deserialize(job.get("udf.import.list")));
        wrapped.init(store);
       
        job.set("map.target.ops", ObjectSerializer.serialize(targetOps));
        // Mimic org.apache.hadoop.mapred.FileSplit if feasible...

        lastConf = job;       
        DataStorage store = new HDataStorage(ConfigurationUtil.toProperties(job));
        // if the execution is against Mapred DFS, set
        // working dir to /user/<userid>
        if(execType == ExecType.MAPREDUCE)
            store.setActiveContainer(store.asContainer("/user/" + job.getUser()));
        PigContext.setPackageImportList((ArrayList<String>)ObjectSerializer.deserialize(job.get("udf.import.list")));
        List<String> inpSignatureLists = (ArrayList<String>)ObjectSerializer.deserialize(job.get("pig.inpSignatures"));
        if (inpSignatureLists.get(index)!=null) {
            // Pass loader signature to slice
            store.getConfiguration().setProperty("pig.loader.signature", inpSignatureLists.get(index));

        lastConf = job;       
        DataStorage store = new HDataStorage(ConfigurationUtil.toProperties(job));
        // if the execution is against Mapred DFS, set
        // working dir to /user/<userid>
        if(pigContext.getExecType() == ExecType.MAPREDUCE)
            store.setActiveContainer(store.asContainer("/user/" + job.getUser()));
        wrapped.init(store);
       
        job.set("map.target.ops", ObjectSerializer.serialize(targetOps));
        // Mimic org.apache.hadoop.mapred.FileSplit if feasible...
        String[] locations = wrapped.getLocations();

