Examples of readRawRecord()


Examples of com.backtype.hadoop.formats.RecordInputStream.readRawRecord()

                RecordOutputStream os = fact.getOutputStream(fs, tmpFile);
                for(Path i: sources) {
                    LOG.info("Opening " + i.toString() + " for consolidation");
                    RecordInputStream is = fact.getInputStream(fs, i);
                    byte[] record;
                    while((record = is.readRawRecord()) != null) {
                        os.writeRaw(record);
                    }
                    is.close();
                    rprtr.progress();
                }
View Full Code Here

Examples of com.backtype.hadoop.formats.RecordInputStream.readRawRecord()

    public String getMetadata(String metafilename) throws IOException {
        Path metaPath = toStoredMetadataPath(metafilename);
        if(exists(metaPath)) {
            RecordInputStream is = createInputStream(metaPath);
            String metaStr = new String(is.readRawRecord(), "UTF-8");
            is.close();
            return metaStr.substring(1);
        } else {
            return null;
        }
View Full Code Here

Examples of com.backtype.hadoop.formats.RecordInputStream.readRawRecord()

    public static List<String> getPailRecords(Pail pail) throws IOException {
        List<String> ret = new ArrayList<String>();
        for(String s: pail.getUserFileNames()) {
            RecordInputStream is = pail.openRead(s);
            while(true) {
                byte[] r = is.readRawRecord();
                if(r==null) break;
                ret.add(new String(r));
            }
            is.close();
        }
View Full Code Here

Examples of com.backtype.hadoop.formats.RecordInputStream.readRawRecord()

            RecordOutputStream fout = factout.getOutputStream(fsDest, target);

            try {
                byte[] record;
                int bytes = 0;
                while((record = fin.readRawRecord()) != null) {
                    fout.writeRaw(record);
                    bytes+=record.length;
                    if(bytes >= 1000000) { //every 1 MB of data report progress so we don't time out on large files
                        bytes = 0;
                        reporter.progress();
View Full Code Here

Examples of com.backtype.hadoop.formats.RecordInputStream.readRawRecord()

                RecordOutputStream os = fact.getOutputStream(fs, tmpFile);
                for(Path i: sources) {
                    LOG.info("Opening " + i.toString() + " for consolidation");
                    RecordInputStream is = fact.getInputStream(fs, i);
                    byte[] record;
                    while((record = is.readRawRecord()) != null) {
                        os.writeRaw(record);
                    }
                    is.close();
                    rprtr.progress();
                }
View Full Code Here

Examples of com.backtype.hadoop.formats.RecordInputStream.readRawRecord()

        }
        assertTrue(parents.contains(""));
        assertTrue(parents.contains("1"));

        RecordInputStream in = p1.openRead("file1");
        String s = new String(in.readRawRecord());
        assertTrue(in.readRawRecord()==null);
        in.close();
        assertEquals("a", s);

    }
View Full Code Here

Examples of com.backtype.hadoop.formats.RecordInputStream.readRawRecord()

        assertTrue(parents.contains(""));
        assertTrue(parents.contains("1"));

        RecordInputStream in = p1.openRead("file1");
        String s = new String(in.readRawRecord());
        assertTrue(in.readRawRecord()==null);
        in.close();
        assertEquals("a", s);

    }
View Full Code Here

Examples of com.backtype.hadoop.formats.RecordInputStream.readRawRecord()

    private void checkContains(Pail pail, String file, byte[]... expected) throws Exception {
        RecordInputStream is = pail.openRead(file);
        List<byte[]> records = new ArrayList<byte[]>();
        while(true) {
            byte[] arr = is.readRawRecord();
            if(arr==null) break;
            records.add(arr);
        }
        assertEquals(expected.length, records.size());
        for(int i=0; i < expected.length; i++) {
View Full Code Here

Examples of com.backtype.hadoop.formats.RecordInputStream.readRawRecord()

    protected List<byte[]> getRecords(Pail p, String userfile) throws Exception {
        List<byte[]> ret = new ArrayList<byte[]>();
        RecordInputStream is = p.openRead(userfile);
        byte[] record;
        while((record = is.readRawRecord())!=null) {
            ret.add(record);
        }
        is.close();
        return ret;
    }
View Full Code Here

Examples of com.backtype.hadoop.formats.RecordInputStream.readRawRecord()

            RecordOutputStream fout = factout.getOutputStream(fsDest, target);

            try {
                byte[] record;
                int bytes = 0;
                while((record = fin.readRawRecord()) != null) {
                    fout.writeRaw(record);
                    bytes+=record.length;
                    if(bytes >= 1000000) { //every 1 MB of data report progress so we don't time out on large files
                        bytes = 0;
                        reporter.progress();
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact coftware#gmail.com.