Package org.apache.pig

Examples of org.apache.pig.LoadFunc
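Most of the examples below share one pattern: wrap a concrete LoadFunc (PigStorage, TextLoader, or a custom loader) in a ReadToEndLoader, which handles the InputFormat/RecordReader plumbing against the supplied Configuration, then call getNext() until it returns null. The following is a minimal, stand-alone sketch of that pattern; the comma delimiter, the plain local Configuration, and the path /tmp/example-input.txt are illustrative assumptions rather than values taken from the snippets.

import org.apache.hadoop.conf.Configuration;
import org.apache.pig.LoadFunc;
import org.apache.pig.builtin.PigStorage;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.io.ReadToEndLoader;

public class LoadFuncReadSketch {
    public static void main(String[] args) throws Exception {
        // Wrap the real loader; ReadToEndLoader reads the location from the
        // given split index through to the end of the input.
        LoadFunc loader = new ReadToEndLoader(
                new PigStorage(","),          // the wrapped LoadFunc (assumed delimiter)
                new Configuration(),          // cluster/job configuration
                "/tmp/example-input.txt",     // input location (assumed path)
                0);                           // split index to start reading from
        Tuple t;
        while ((t = loader.getNext()) != null) {   // null signals end of input
            System.out.println(t);
        }
    }
}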


           
                // Pull tuples from the load operator until STATUS_EOP, using the
                // loader's LoadCaster to convert the raw DataByteArray fields into
                // a chararray key and an int value.
                ld.setPc(pc);
                Tuple dummyTuple = null;
                for (Result res = ld.getNext(dummyTuple);
                        res.returnStatus != POStatus.STATUS_EOP;
                        res = ld.getNext(dummyTuple)) {
                    Tuple tup = (Tuple) res.result;
                    LoadFunc lf = (LoadFunc) pc.instantiateFuncFromSpec(ld.getLFile().getFuncSpec());
                    String key = lf.getLoadCaster().bytesToCharArray(((DataByteArray) tup.get(keyField)).get());
                    Tuple csttup = TupleFactory.getInstance().newTuple(2);
                    csttup.set(0, key);
                    csttup.set(1, lf.getLoadCaster().bytesToInteger(((DataByteArray) tup.get(1)).get()));
                    DataBag vals = null;
                    if (replTbl.containsKey(key)) {
                        vals = replTbl.get(key);
                    }
                    else {
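The snippet above uses the loader's LoadCaster to turn raw DataByteArray bytes into typed values. Below is a small, hedged illustration of those two conversions in isolation, using Utf8StorageConverter, the default caster for text loaders such as PigStorage; the sample input values are made up.

import org.apache.pig.LoadCaster;
import org.apache.pig.builtin.Utf8StorageConverter;
import org.apache.pig.data.DataByteArray;

public class LoadCasterSketch {
    public static void main(String[] args) throws Exception {
        LoadCaster caster = new Utf8StorageConverter();
        DataByteArray rawKey = new DataByteArray("user-42");    // assumed sample bytes
        DataByteArray rawCount = new DataByteArray("7");        // assumed sample bytes
        String key = caster.bytesToCharArray(rawKey.get());     // -> "user-42"
        Integer count = caster.bytesToInteger(rawCount.get());  // -> 7
        System.out.println(key + " : " + count);
    }
}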


        Util.createInputFile(cluster,
                "/tmp/testLFTextdir1/testLFTextdir2/testLFTest-input1.txt",
                new String[] {input1});
        // check that loading the top-level dir still reads the file a couple
        // of subdirs below
        LoadFunc text1 = new ReadToEndLoader(new TextLoader(), ConfigurationUtil.
                toConfiguration(cluster.getProperties()), "/tmp/testLFTextdir1", 0);
        Tuple f1 = text1.getNext();
        Tuple f2 = text1.getNext();
        assertTrue(expected1.equals(f1.get(0).toString()) &&
            expected2.equals(f2.get(0).toString()));
        Util.deleteFile(cluster, "testLFTest-input1.txt");
        Util.createInputFile(cluster, "testLFTest-input2.txt");
        LoadFunc text2 = new ReadToEndLoader(new TextLoader(), ConfigurationUtil.
                toConfiguration(cluster.getProperties()), "testLFTest-input2.txt", 0);
        Tuple f3 = text2.getNext();
        assertTrue(f3 == null);
        Util.deleteFile(cluster, "testLFTest-input2.txt");
    }

        String query = "a = load 'testSFPig-input.txt';" +
            "store a into 'testSFPig-output.txt';";
        pigServer.setBatchOn();
        Util.registerMultiLineQuery(pigServer, query);
        pigServer.executeBatch();
        LoadFunc lfunc = new ReadToEndLoader(new PigStorage(), ConfigurationUtil.
                toConfiguration(cluster.getProperties()), "testSFPig-output.txt", 0);
        Tuple f2 = lfunc.getNext();
       
        assertEquals(f1, f2);
        Util.deleteFile(cluster, "testSFPig-input.txt");
        Util.deleteFile(cluster, "testSFPig-output.txt");
    }

    @Test
    public void testLFPig() throws Exception {
        Util.createInputFile(cluster, "input.txt", new String[]
                                        {"this:is:delimited:by:a:colon\n"});
        int arity1 = 6;
        LoadFunc lf = new PigStorage(":");
        LoadFunc p1 = new ReadToEndLoader(lf, ConfigurationUtil.
                toConfiguration(cluster.getProperties()), "input.txt", 0);
        Tuple f1 = p1.getNext();
        assertTrue(f1.size() == arity1);
        Util.deleteFile(cluster, "input.txt");
       
        int LOOP_COUNT = 100;
        String[] input = new String[LOOP_COUNT * LOOP_COUNT];
        int n = 0;
        for (int i = 0; i < LOOP_COUNT; i++) {
            for (int j = 0; j < LOOP_COUNT; j++) {
                input[n++] = (i + "\t" + i + "\t" + j % 2);
            }
        }
        Util.createInputFile(cluster, "input.txt", input);

        LoadFunc p15 = new ReadToEndLoader(new PigStorage(), ConfigurationUtil.
                toConfiguration(cluster.getProperties()), "input.txt", 0);
       
        int count = 0;
        while (true) {
            Tuple f15 = p15.getNext();
            if (f15 == null)
                break;
            count++;
            assertEquals(3, f15.size());
        }
        assertEquals(LOOP_COUNT * LOOP_COUNT, count);
        Util.deleteFile(cluster, "input.txt");
       
        String input2 = ":this:has:a:leading:colon\n";
        int arity2 = 6;
        Util.createInputFile(cluster, "input.txt", new String[] {input2});
        LoadFunc p2 = new ReadToEndLoader(new PigStorage(":"), ConfigurationUtil.
                toConfiguration(cluster.getProperties()), "input.txt", 0);
        Tuple f2 = p2.getNext();
        assertTrue(f2.size() == arity2);
        Util.deleteFile(cluster, "input.txt");
       
        String input3 = "this:has:a:trailing:colon:\n";
        int arity3 = 6;
        Util.createInputFile(cluster, "input.txt", new String[] {input3});
        LoadFunc p3 = new ReadToEndLoader(new PigStorage(":"), ConfigurationUtil.
                toConfiguration(cluster.getProperties()), "input.txt", 0);
        Tuple f3 = p3.getNext();
        assertTrue(f3.size() == arity3);
        Util.deleteFile(cluster, "input.txt");
    }

        // For all other splits, bind to the first key which is greater
        // than or equal to the first key of the map.

        for (int i = 0; i < relationCnt - 1; i++) {

            LoadFunc loadfunc = (LoadFunc)PigContext.instantiateFuncFromSpec(sidFuncSpecs.get(i));
            loadfunc.setUDFContextSignature(loaderSignatures.get(i));
            Job dummyJob = new Job(new Configuration(PigMapReduce.sJobConf));
            loadfunc.setLocation(sideFileSpecs.get(i), dummyJob);
            ((IndexableLoadFunc)loadfunc).initialize(dummyJob.getConfiguration());
            sideLoaders.add(loadfunc);
            Tuple rearranged;

            if (index.get(0).first.equals(curSplitIdx)) {
                // This is the first split; bind to the very first record in all side relations.
                Tuple t = loadfunc.getNext();
                if(null == t)   // This side relation is entirely empty.
                    continue;
                rearranged = applyLRon(t, i+1);
                heap.offer(rearranged);
                continue;
            }
            else {
                // This is not the first split; we need to bind to the key equal
                // to firstBaseKey, or to the next key after it.

                // First seek close to base key. 
                ((IndexableLoadFunc)loadfunc).seekNear(firstBaseKey instanceof
                        Tuple ? (Tuple) firstBaseKey : mTupleFactory.newTuple(firstBaseKey));

                // Since the IndexableLoadFunc contract does not specify exactly
                // where seekNear() will land us, we keep reading from the side
                // loader until the key is actually greater than or equal to the
                // base key.
                while(true){
                    Tuple t = loadfunc.getNext();
                    if(t==null) // This relation has ended.
                        break;
                    rearranged = applyLRon(t, i+1);
                    if(rearranged.get(1) == null) // If we got a null key here
                        continue;             // it implies we are still behind.
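The comments in the snippet above describe the contract being worked around: seekNear() only positions an IndexableLoadFunc close to the requested key, so the caller has to keep scanning until it reaches a key that is greater than or equal to the base key. Here is a simplified, stand-alone sketch of that seek-then-scan step; it assumes the join key sits in field 0 and is Comparable, and it is not the actual operator code.

import java.io.IOException;

import org.apache.pig.IndexableLoadFunc;
import org.apache.pig.LoadFunc;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;

public class SeekThenScanSketch {
    // Seek close to baseKey, then scan forward to the first tuple whose key
    // (assumed to be field 0) is >= baseKey; returns null if the relation
    // ends first.
    public static Tuple bindToKey(LoadFunc loader, Comparable<Object> baseKey)
            throws IOException {
        ((IndexableLoadFunc) loader).seekNear(
                TupleFactory.getInstance().newTuple(baseKey));
        Tuple t;
        while ((t = loader.getNext()) != null) {
            Object key = t.get(0);
            if (key != null && baseKey.compareTo(key) <= 0) {
                return t;       // first record at or past baseKey
            }
        }
        return null;            // relation exhausted before reaching baseKey
    }
}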

        if (message.length()!=0)
            log.info(message);
       
        LoadPushDown.RequiredFieldResponse response = null;
        try {
            LoadFunc loadFunc = load.getLoadFunc();
            if (loadFunc instanceof LoadPushDown) {
                response = ((LoadPushDown)loadFunc).pushProjection(requiredFields);
            }
                               
        } catch (FrontendException e) {
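The snippet above is the planner side of projection push-down: if the loader implements LoadPushDown, the optimizer hands it the required fields through pushProjection(). For the loader side, here is a hedged sketch of a hypothetical PigStorage subclass (PigStorage already implements LoadPushDown) that logs the pushed column indexes and then delegates to the stock behaviour; the class name is made up for illustration.

import org.apache.pig.LoadPushDown;
import org.apache.pig.builtin.PigStorage;
import org.apache.pig.impl.logicalLayer.FrontendException;

public class LoggingPushDownStorage extends PigStorage {

    public LoggingPushDownStorage(String delimiter) {
        super(delimiter);
    }

    // Called by the front end when it can prune columns; log the pushed column
    // indexes and let PigStorage decide whether the request is honoured.
    @Override
    public LoadPushDown.RequiredFieldResponse pushProjection(
            LoadPushDown.RequiredFieldList requiredFieldList) throws FrontendException {
        if (requiredFieldList != null && requiredFieldList.getFields() != null) {
            for (LoadPushDown.RequiredField field : requiredFieldList.getFields()) {
                System.out.println("projection pushed for column " + field.getIndex());
            }
        }
        return super.pushProjection(requiredFieldList);
    }
}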

    public boolean hasCompleted() throws ExecException {
        return true;
    }
   
    public Iterator<Tuple> getResults() throws ExecException {
        final LoadFunc p;
       
        try{
             LoadFunc originalLoadFunc =
                 (LoadFunc)PigContext.instantiateFuncFromSpec(
                         outFileSpec.getFuncSpec());
            
             p = (LoadFunc) new ReadToEndLoader(originalLoadFunc,
                     ConfigurationUtil.toConfiguration(