Package org.apache.hadoop.fs

Examples of org.apache.hadoop.fs.FSDataInputStream$Buffer
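FSDataInputStream is the seekable, positioned-readable stream that FileSystem.open(Path) returns; the Buffer suffix refers to an internal buffering class of older Hadoop releases. Before the excerpts, a minimal sketch of the basic open/seek/read pattern (the path and file contents here are hypothetical):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class FsOpenExample {
        public static void main(String[] args) throws Exception {
            Path path = new Path("/tmp/example.txt"); // hypothetical path
            FileSystem fs = path.getFileSystem(new Configuration());

            // open() returns an FSDataInputStream: a DataInputStream that is
            // also Seekable and PositionedReadable.
            try (FSDataInputStream in = fs.open(path)) {
                in.seek(0); // absolute seek to any offset within the file
                BufferedReader reader = new BufferedReader(
                        new InputStreamReader(in, StandardCharsets.UTF_8));
                System.out.println(reader.readLine());
            }
        }
    }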


    Assert.assertNotNull("No files found written to HDFS", statuses);

    for (FileStatus status : statuses) {
      Path filePath = status.getPath();
      logger.info("Found file on DFS: {}", filePath);
      FSDataInputStream stream = fs.open(filePath);
      BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
      String line = reader.readLine();
      logger.info("First line in file {}: {}", filePath, line);
      Assert.assertTrue(line.startsWith("yarg"));
    }
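
The test above pulls lines sequentially; FSDataInputStream also implements PositionedReadable, so a block can be read at an absolute file offset without moving the stream's current position. A minimal sketch, with a hypothetical helper name:

    import java.io.IOException;

    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public final class PreadExample {
        // Reads 'len' bytes starting at 'offset'; the positioned readFully
        // does not disturb the stream's current position.
        static byte[] preadBytes(FileSystem fs, Path file, long offset, int len)
                throws IOException {
            byte[] buf = new byte[len];
            try (FSDataInputStream in = fs.open(file)) {
                in.readFully(offset, buf);
            }
            return buf;
        }
    }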

    int found = 0;
    int expected = bodies.size();
    for(String outputFile : getAllFiles(dir)) {
      String name = (new File(outputFile)).getName();
      if(name.startsWith(prefix)) {
        FSDataInputStream input = fs.open(new Path(outputFile));
        BufferedReader reader = new BufferedReader(new InputStreamReader(input));
        String body = null;
        while((body = reader.readLine()) != null) {
          bodies.remove(body);
          found++;
        }
        reader.close();
      }
    }
    Assert.assertTrue("Found = " + found + ", Expected = " + expected +
        ", Left = " + bodies.size() + " " + bodies, bodies.size() == 0);

    int found = 0;
    int expected = bodies.size();
    for(String outputFile : getAllFiles(dir)) {
      String name = (new File(outputFile)).getName();
      if(name.startsWith(prefix)) {
        FSDataInputStream input = fs.open(new Path(outputFile));
        DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>();
        DataFileStream<GenericRecord> avroStream =
            new DataFileStream<GenericRecord>(input, reader);
        GenericRecord record = new GenericData.Record(avroStream.getSchema());
        while (avroStream.hasNext()) {
          avroStream.next(record);
          ByteBuffer body = (ByteBuffer) record.get("body");
          CharsetDecoder decoder = Charsets.UTF_8.newDecoder();
          String bodyStr = decoder.decode(body).toString();
          LOG.debug("Removing event: {}", bodyStr);
          bodies.remove(bodyStr);
          found++;
        }
        avroStream.close();
        input.close();
      }
    }
    Assert.assertTrue("Found = " + found + ", Expected = "  +
        expected + ", Left = " + bodies.size() + " " + bodies,
          bodies.size() == 0);
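
Because FSDataInputStream is an ordinary InputStream, it can be handed straight to Avro's DataFileStream, as above. For random access, avro-mapred's FsInput adapts a path to Avro's SeekableInput so DataFileReader can be used instead; a sketch (the path and the "body" field are assumptions for illustration):

    import org.apache.avro.file.DataFileReader;
    import org.apache.avro.generic.GenericDatumReader;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.avro.mapred.FsInput;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    public class AvroReadExample {
        public static void main(String[] args) throws Exception {
            Path avroFile = new Path("/flume/events.avro"); // hypothetical path
            // FsInput wraps FileSystem.open() into Avro's SeekableInput
            try (DataFileReader<GenericRecord> reader = new DataFileReader<>(
                    new FsInput(avroFile, new Configuration()),
                    new GenericDatumReader<GenericRecord>())) {
                for (GenericRecord record : reader) {
                    System.out.println(record.get("body"));
                }
            }
        }
    }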


    for (String uri : uris) {

      InputStream in = null;
      FSDataInputStream i = null;

      try {
        Path srcPat = new Path(uri);
        FileSystem srcFs = getFS(srcPat);

        for (Path src : FileUtil.stat2Paths(srcFs.globStatus(srcPat), srcPat)) {
          Assert.isTrue(srcFs.isFile(src), "Source must be a file");
          i = srcFs.open(src);
          switch (i.readShort()) {
          case 0x1f8b: // gzip magic number (RFC 1952)
            i.seek(0);
            in = new GZIPInputStream(i);
            break;
          case 0x5345: // 'S' 'E' -- first two bytes of a SequenceFile's SEQ header
            if (i.readByte() == 'Q') {
              i.close();
              in = new TextRecordInputStream(src, srcFs, configuration);
            }
            break;
          default:
            // Plain bytes: rewind past the two bytes consumed by the probe
            i.seek(0);
            in = i;
            break;
          }
          texts.add(getContent(in));
        }
      } catch (IOException ex) {
        throw new HadoopException("Cannot read " + uri + "; " + ex.getMessage(), ex);
      } finally {
        IOUtils.closeStream(in);
        IOUtils.closeStream(i);
      }
    }
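
The 'S' 'E' probe above is sniffing for the three-byte SEQ magic of a SequenceFile. The standard way to read one back, without a hand-rolled TextRecordInputStream, is SequenceFile.Reader; a sketch:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.util.ReflectionUtils;

    public class SequenceFileDump {
        // Prints every key/value pair; the key and value classes are taken
        // from the file's own header.
        static void dump(Configuration conf, Path file) throws IOException {
            try (SequenceFile.Reader reader =
                    new SequenceFile.Reader(conf, SequenceFile.Reader.file(file))) {
                Writable key = (Writable)
                        ReflectionUtils.newInstance(reader.getKeyClass(), conf);
                Writable value = (Writable)
                        ReflectionUtils.newInstance(reader.getValueClass(), conf);
                while (reader.next(key, value)) {
                    System.out.println(key + "\t" + value);
                }
            }
        }
    }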

            end = start + split.getLength();
            final Path file = split.getPath();
            // open the file and seek to the start of the split
            FileSystem fs = file.getFileSystem(job);
            FSDataInputStream fileIn = fs.open(split.getPath());
            fileIn.seek(start);
       
            if (file.toString().endsWith(".bz2") || file.toString().endsWith(".bz"))
            {
              // For bzip2, CBZip2InputStream can start decoding within the split,
              // so it supplies the record reader's underlying input stream.
              CBZip2InputStream in = new CBZip2InputStream(fileIn, 9, end);
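
Constructing CBZip2InputStream by hand, as above, ties the reader to one specific splittable-bzip2 implementation. For ordinary whole-file decompression, Hadoop's CompressionCodecFactory picks a codec from the file extension; a sketch:

    import java.io.IOException;
    import java.io.InputStream;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.CompressionCodecFactory;

    public class CodecOpenExample {
        // Wraps the raw stream in a decompressor chosen by extension
        // (.bz2, .gz, ...); falls back to the raw stream when none matches.
        static InputStream openMaybeCompressed(FileSystem fs, Path file,
                Configuration conf) throws IOException {
            FSDataInputStream raw = fs.open(file);
            CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(file);
            return codec == null ? raw : codec.createInputStream(raw);
        }
    }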

        File destination = new File(testDestinationDirectory.getAbsolutePath() + "1");
        Utils.mkdirs(destination);
        File copyLocation = new File(destination, "0_0.index");

        // Spy on the opened stream so the first read() behaves normally and
        // the second throws, exercising the caller's error handling.
        FSDataInputStream input = fs.open(source);
        FSDataInputStream spyinput = Mockito.spy(input);

        Mockito.doAnswer(Mockito.CALLS_REAL_METHODS)
               .doThrow(new EofException())
               .when(spyinput)
               .read();

    private byte[] calculateCheckSumForFile(Path source) throws Exception {
        CheckSum fileCheckSumGenerator = CheckSum.getInstance(CheckSumType.MD5);
        byte[] buffer = new byte[VoldemortConfig.DEFAULT_BUFFER_SIZE];

        FSDataInputStream input = null;

        Configuration config = new Configuration();

        FileSystem fs = source.getFileSystem(config);
        input = fs.open(source);

        while(true) {
            int read = input.read(buffer);
            if(read < 0) {
                break;
            }
            // Update the per file checksum
            if(fileCheckSumGenerator != null) {
                fileCheckSumGenerator.update(buffer, 0, read);
            }
        }
        input.close();

        return fileCheckSumGenerator.getCheckSum();
    }
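
The same digest loop can be written against the JDK's MessageDigest for readers without Voldemort's CheckSum helper on the classpath; a sketch:

    import java.io.IOException;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class Md5Example {
        // Streams the file through an MD5 digest, one buffer at a time.
        static byte[] md5Of(FileSystem fs, Path file)
                throws IOException, NoSuchAlgorithmException {
            MessageDigest md5 = MessageDigest.getInstance("MD5");
            byte[] buffer = new byte[64 * 1024];
            try (FSDataInputStream in = fs.open(file)) {
                int read;
                while ((read = in.read(buffer)) != -1) {
                    md5.update(buffer, 0, read);
                }
            }
            return md5.digest();
        }
    }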

    public static String readFileContents(FileSystem fs, Path path, int bufferSize)
            throws IOException {
        if(bufferSize <= 0)
            return "";

        FSDataInputStream input = fs.open(path);
        byte[] buffer = new byte[bufferSize];

        ByteArrayOutputStream stream = new ByteArrayOutputStream();

        while(true) {
            int read = input.read(buffer);
            if(read < 0) {
                break;
            }
            // Copy only the bytes actually read on this pass
            stream.write(buffer, 0, read);
        }
        input.close();

        return new String(stream.toByteArray());
    }

                                          File dest,
                                          CopyStats stats,
                                          CheckSumType checkSumType) throws Throwable {
        CheckSum fileCheckSumGenerator = null;
        logger.debug("Starting copy of " + source + " to " + dest);
        FSDataInputStream input = null;
        OutputStream output = null;

        for(int attempt = 0; attempt < maxAttempts; attempt++) {
            boolean success = true;
            long totalBytesRead = 0;
            boolean fsOpened = false;
            try {

                // Create a per file checksum generator
                if(checkSumType != null) {
                    fileCheckSumGenerator = CheckSum.getInstance(checkSumType);
                }

                logger.info("Attempt " + attempt + " at copy of " + source + " to " + dest);

                input = fs.open(source);
                fsOpened = true;

                output = new BufferedOutputStream(new FileOutputStream(dest));
                byte[] buffer = new byte[bufferSize];
                while(true) {
                    int read = input.read(buffer);
                    if(read < 0) {
                        break;
                    } else {
                        output.write(buffer, 0, read);
                    }
                    // Track progress and feed the per-file checksum
                    if(fileCheckSumGenerator != null) {
                        fileCheckSumGenerator.update(buffer, 0, read);
                    }
                    totalBytesRead += read;
                }
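
Hadoop ships this same read/write loop as IOUtils.copyBytes, which is handy when the retry and checksum bookkeeping above are not needed; a sketch:

    import java.io.BufferedOutputStream;
    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;

    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IOUtils;

    public class CopyToLocalExample {
        static void copyToLocal(FileSystem fs, Path src, File dest) throws IOException {
            FSDataInputStream in = fs.open(src);
            OutputStream out = new BufferedOutputStream(new FileOutputStream(dest));
            // copyBytes closes both streams itself when the final flag is true
            IOUtils.copyBytes(in, out, 64 * 1024, true);
        }
    }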
