Package org.apache.hadoop.fs

Examples of org.apache.hadoop.fs.FSInputStream
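
The snippets below are drawn from early Hadoop sources (note the long-gone openRaw, getFileCacheHints, dfsClient, and UTF8 APIs). In that API, FSInputStream is the abstract base class for the streams a FileSystem hands out: it extends java.io.InputStream with seeking and positioned reads. A concrete subclass supplies read(), seek(long), getPos(), and seekToNewSource(long); the base class then derives the positioned read(position, ...) and readFully(position, ...) calls seen throughout the examples by seeking, reading, and restoring the previous offset. As a minimal sketch of that contract, here is a hypothetical in-memory implementation (the class name and backing array are illustrative, not part of Hadoop):

    import java.io.IOException;
    import org.apache.hadoop.fs.FSInputStream;

    // Hypothetical: serves a byte array through the FSInputStream contract.
    class ByteArrayFSInputStream extends FSInputStream {
      private final byte[] data;
      private int pos = 0;

      ByteArrayFSInputStream(byte[] data) { this.data = data; }

      public synchronized int read() throws IOException {
        // One byte as 0-255, or -1 at end of stream.
        return pos < data.length ? (data[pos++] & 0xff) : -1;
      }

      public synchronized void seek(long target) throws IOException {
        if (target < 0 || target > data.length) {
          throw new IOException("Seek out of range: " + target);
        }
        pos = (int) target;
      }

      public synchronized long getPos() throws IOException {
        return pos;
      }

      public synchronized boolean seekToNewSource(long targetPos) throws IOException {
        return false; // a byte array has no alternate replica to fail over to
      }
    }

With those four primitives in place, inherited calls such as readFully(0, buf) in the first example below work against any implementation.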


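From the S3 FileSystem tests: the file is renamed, the move is asserted, and the contents are then reread through openRaw and verified byte by byte with a positioned readFully.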
    // Rename the file, then verify the old path is gone and the new one exists.
    Path newPath = new Path("/test/hadoop/newfile");
    s3FileSystem.rename(path, newPath);
    assertFalse("No longer exists", s3FileSystem.exists(path));
    assertTrue("Moved", s3FileSystem.exists(newPath));

    // Positioned read: readFully(0, buf) fills buf from offset 0 without
    // disturbing the stream's current position. (len and data[] are set up
    // earlier in the full test.)
    FSInputStream in = s3FileSystem.openRaw(newPath);
    byte[] buf = new byte[len];
    in.readFully(0, buf);

    assertEquals(len, buf.length);
    for (int i = 0; i < buf.length; i++) {
      assertEquals("Position " + i, data[i], buf[i]);
    }


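Part of a recursive verification walk: directories recurse through verifyDir, while regular files are streamed through a CRC32 and checked against a recorded checksum.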
      if ( isDir ) {
        verifyDir(client, path);
      } else {
        // Not a directory: stream the file through a CRC32 in 4 KB chunks
        // and compare the resulting checksum against the recorded value.
        CRC32 fileCRC = new CRC32();
        FSInputStream in = client.open(path);
        byte[] buf = new byte[4096];
        int nRead = 0;
        while ( (nRead = in.read(buf, 0, buf.length)) > 0 ) {
          fileCRC.update(buf, 0, nRead);
        }
        in.close(); // not shown in the original excerpt; added to release the stream

        verifyChecksum(path, fileCRC.getValue());
      }

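From an HTTP-backed FileSystem implementation: the body of a GET against the /data servlet is adapted to the FSInputStream contract so it can be returned as an FSDataInputStream.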
    // Open an HTTP GET against the /data servlet for this path.
    HttpURLConnection connection =
        openConnection("/data" + f.toUri().getPath(), "ugi=" + ugi);
    connection.setRequestMethod("GET");
    connection.connect();
    // Adapt the response body to the FSInputStream contract.
    final InputStream in = connection.getInputStream();
    return new FSDataInputStream(new FSInputStream() {
        public int read() throws IOException {
          return in.read();
        }
        public int read(byte[] b, int off, int len) throws IOException {
          return in.read(b, off, len);
        }
        // Truncated in the excerpt: an HTTP response stream cannot be
        // repositioned, so the remaining methods presumably reject seeks.
        public void seek(long pos) throws IOException { throw new IOException("Can't seek!"); }
        public long getPos() throws IOException { throw new IOException("Position unknown"); }
        public boolean seekToNewSource(long t) throws IOException { return false; }
      });

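A DFS read-back test that never stores the expected file contents: the data was written from a seeded PRNG, so restarting the generator reproduces the expected bytes buffer by buffer as each file is read.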
        long lastLong = randomDataGenerator.nextLong();
        randomDataGenerator = makeRandomDataGenerator(); // restart (make new) PRNG
        ListIterator li = testfilesList.listIterator();
        while (li.hasNext()) {
          testFileName = (UTF8) li.next();
          FSInputStream nis = dfsClient.open(testFileName);
          byte[] buffer = new byte[bufferSize]; // declared earlier in the full source
          byte[] bufferGolden = new byte[bufferSize];
          int m = 42; // any value != -1, just to enter the loop
          try {
            while (m != -1) {
              m = nis.read(buffer);
              if (m == buffer.length) {
                // Full buffer: compare against the next bufferSize PRNG bytes.
                randomDataGenerator.nextBytes(bufferGolden);
                assertBytesEqual(buffer, bufferGolden, buffer.length);
              } else if (m > 0) {
                // Short read at end of file: compare only the m bytes returned.
                byte[] bufferGoldenPartial = new byte[m];
                randomDataGenerator.nextBytes(bufferGoldenPartial);
                assertBytesEqual(buffer, bufferGoldenPartial, bufferGoldenPartial.length);
              }
            }
          } finally {
            nis.close();
          }
        }
        // Verify the last PRNG value to ensure the last file's length was fully checked.
        long lastLongAgain = randomDataGenerator.nextLong();
        assertEquals(lastLong, lastLongAgain);

      actual[idx] = 0;
    }
  }
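seekReadFile (the braces just above close a data-checking helper from the same file) opens the file twice over: openRaw returns the raw FSInputStream, which is then wrapped in an FSDataInputStream with a 4 KB buffer, and the expected contents are regenerated from the seeded PRNG.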
 
  private void seekReadFile(FileSystem fileSys, Path name) throws IOException {
    FSInputStream stmRaw = fileSys.openRaw(name);
    FSDataInputStream stm = new FSDataInputStream(stmRaw, 4096);
    byte[] expected = new byte[ONEMB];
    Random rand = new Random(seed);
    rand.nextBytes(expected);
   


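The same checksum walk as the earlier example, this time reading through the raw DFS client (dfs.dfs) instead of the FileSystem facade.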
      if ( isDir ) {
        verifyDir(dfs, path);
      } else {
        // Not a directory: checksum the file data, streamed in 4 KB chunks
        // straight from the DFS client.
        CRC32 fileCRC = new CRC32();
        FSInputStream in = dfs.dfs.open(pathName);
        byte[] buf = new byte[4096];
        int nRead = 0;
        while ( (nRead = in.read(buf, 0, buf.length)) > 0 ) {
          fileCRC.update(buf, 0, nRead);
        }
        in.close(); // not shown in the original excerpt; added to release the stream

        verifyChecksum(pathName, fileCRC.getValue());
      }

  }
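checkFile verifies block placement as well as contents: getFileCacheHints reports the hosts holding each block of the requested range, and a positioned readFully then rereads the whole file for comparison.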
 
  private void checkFile(FileSystem fileSys, Path name) throws IOException {
    // One hostname array per block; the assertion only holds because the
    // test writes the file with one byte per block (fileSize blocks in all).
    String[][] locations = fileSys.getFileCacheHints(name, 0, fileSize);
    assertEquals("Number of blocks", fileSize, locations.length);
    FSInputStream stm = fileSys.openRaw(name);
    byte[] expected = new byte[fileSize];
    Random rand = new Random(seed);
    rand.nextBytes(expected);
    // Sanity check: read the whole file with a positioned readFully and compare.
    byte[] actual = new byte[fileSize];
    stm.readFully(0, actual);
    checkAndEraseData(actual, 0, expected, "Read Sanity Test");
    stm.close();
  }


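The final example pairs small reads with short forward seeks inside a single block, the access pattern behind the HADOOP-922 regression this test guards against.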
  /*
   * Read some data, skip a few bytes and read more. HADOOP-922.
   */
  private void smallReadSeek(FileSystem fileSys, Path name) throws IOException {
    FSInputStream stmRaw = fileSys.openRaw(name);
    byte[] expected = new byte[ONEMB];
    Random rand = new Random(seed);
    rand.nextBytes(expected);
   
    // Issue a simple read first.
    byte[] actual = new byte[128];
    stmRaw.seek(100000);
    stmRaw.read(actual, 0, actual.length);
    checkAndEraseData(actual, 100000, expected, "First Small Read Test");

    // now do a small seek of 4 bytes, within the same block.
    int newpos1 = 100000 + 128 + 4;
    stmRaw.seek(newpos1);
    stmRaw.read(actual, 0, actual.length);
    checkAndEraseData(actual, newpos1, expected, "Small Seek Bug 1");

    // seek another 256 bytes this time
    int newpos2 = newpos1 + 256;
    stmRaw.seek(newpos2);
    stmRaw.read(actual, 0, actual.length);
    checkAndEraseData(actual, newpos2, expected, "Small Seek Bug 2");

    // all done
    stmRaw.close();
  }