Package org.apache.hadoop.fs

Examples of org.apache.hadoop.fs.FSInputStream
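FSInputStream is the abstract, seekable stream that a Hadoop FileSystem hands back (wrapped in an FSDataInputStream) when a file is opened. It extends java.io.InputStream and, in most releases, implements Seekable and PositionedReadable, so a concrete subclass has to provide seek(long), getPos(), seekToNewSource(long) and the usual read() methods. The examples below show both sides of that contract: client code that reads through an FSInputStream, and file systems and tests that subclass it. A minimal, hypothetical sketch of such a subclass, backed by an in-memory byte array (the class name is illustrative and not part of Hadoop):

import java.io.IOException;
import org.apache.hadoop.fs.FSInputStream;

// Hypothetical example, in the spirit of the MockFSInputStream used by the
// tests below: a seekable stream over an in-memory byte array.
class ByteArrayFSInputStream extends FSInputStream {
  private final byte[] data;
  private int pos = 0;

  ByteArrayFSInputStream(byte[] data) {
    this.data = data;
  }

  public synchronized void seek(long newPos) throws IOException {
    if (newPos < 0 || newPos > data.length) {
      throw new IOException("cannot seek to " + newPos);
    }
    pos = (int) newPos;
  }

  public synchronized long getPos() throws IOException {
    return pos;
  }

  public boolean seekToNewSource(long targetPos) throws IOException {
    return false; // only one copy of the data, no alternate source to try
  }

  public synchronized int read() throws IOException {
    return pos < data.length ? (data[pos++] & 0xff) : -1;
  }
}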


      if ( isDir ) {
        verifyDir(dfs, path);
      } else {
        // this is not a directory. Checksum the file data.
        CRC32 fileCRC = new CRC32();
        FSInputStream in = dfs.dfs.open(pathName);
        byte[] buf = new byte[4096];
        int nRead = 0;
        while ( (nRead = in.read(buf, 0, buf.length)) > 0 ) {
          fileCRC.update(buf, 0, nRead);
        }
       
        verifyChecksum(pathName, fileCRC.getValue());
      }


      if ( isDir ) {
        verifyDir(client, path);
      } else {
        // this is not a directory. Checksum the file data.
        CRC32 fileCRC = new CRC32();
        FSInputStream in = client.open(path);
        byte[] buf = new byte[4096];
        int nRead = 0;
        while ( (nRead = in.read(buf, 0, buf.length)) > 0 ) {
          fileCRC.update(buf, 0, nRead);
        }
       
        verifyChecksum(path, fileCRC.getValue());
      }
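Both snippets above checksum a file's bytes by reading it through an FSInputStream obtained from the DFS client; note that neither excerpt closes the stream. A hedged sketch of the same pattern against the public FileSystem API, with the stream closed by try-with-resources (class and method names are illustrative):

import java.io.IOException;
import java.io.InputStream;
import java.util.zip.CRC32;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

class ChecksumExample {
  // Illustrative sketch: checksum a file's bytes through FileSystem#open,
  // which hands back an FSDataInputStream wrapping the file system's FSInputStream.
  static long checksumFile(FileSystem fs, Path path) throws IOException {
    CRC32 fileCRC = new CRC32();
    byte[] buf = new byte[4096];
    try (InputStream in = fs.open(path)) {  // stream is closed even if a read fails
      int nRead;
      while ((nRead = in.read(buf, 0, buf.length)) > 0) {
        fileCRC.update(buf, 0, nRead);
      }
    }
    return fileCRC.getValue();
  }
}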

        long lastLong = randomDataGenerator.nextLong();
        randomDataGenerator = makeRandomDataGenerator(); // restart (make new) PRNG
        ListIterator li = testfilesList.listIterator();
        while (li.hasNext()) {
          testFileName = (UTF8) li.next();
          FSInputStream nis = dfsClient.open(testFileName.toString());
          byte[] bufferGolden = new byte[bufferSize];
          int m = 42;
          try {
            while (m != -1) {
              m = nis.read(buffer);
              if (m == buffer.length) {
                randomDataGenerator.nextBytes(bufferGolden);
                assertBytesEqual(buffer, bufferGolden, buffer.length);
              } else if (m > 0) {
                byte[] bufferGoldenPartial = new byte[m];
                randomDataGenerator.nextBytes(bufferGoldenPartial);
                assertBytesEqual(buffer, bufferGoldenPartial, bufferGoldenPartial.length);
              }
            }
          } finally {
            nis.close();
          }
        }
        // verify last randomDataGenerator rand val to ensure last file length was checked
        long lastLongAgain = randomDataGenerator.nextLong();
        assertEquals(lastLong, lastLongAgain);
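This test reads every test file back through an FSInputStream and checks the bytes against a freshly re-seeded pseudo-random generator; because the generator emits exactly as many golden bytes as each read returned, it stays in step with the stream even across short reads. An illustrative sketch of that verification idea (the seed handling and names here are hypothetical, not the test's own helpers):

import java.io.IOException;
import java.io.InputStream;
import java.util.Random;

class PrngVerifyExample {
  // Illustrative sketch: regenerate the golden bytes from the same seeded PRNG
  // that produced the file and compare them with what the stream returns.
  static void verifyAgainstPrng(InputStream in, long seed, int bufferSize)
      throws IOException {
    Random prng = new Random(seed);   // hypothetical seed; the test rebuilds its own PRNG
    byte[] actual = new byte[bufferSize];
    int n;
    while ((n = in.read(actual)) > 0) {
      byte[] golden = new byte[n];    // exactly as many golden bytes as were read
      prng.nextBytes(golden);
      for (int i = 0; i < n; i++) {
        if (actual[i] != golden[i]) {
          throw new IOException("mismatch at byte " + i);
        }
      }
    }
  }
}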

    final long filelength = cl == null? -1: Long.parseLong(cl);
    if (LOG.isDebugEnabled()) {
      LOG.debug("filelength = " + filelength);
    }

    return new FSDataInputStream(new FSInputStream() {
        long currentPos = 0;

        private void update(final boolean isEOF, final int n
            ) throws IOException {
          if (!isEOF) {

    HttpURLConnection connection = null;
    connection = openConnection("/data" + f.toUri().getPath(), "ugi=" + ugi);
    connection.setRequestMethod("GET");
    connection.connect();
    final InputStream in = connection.getInputStream();
    return new FSDataInputStream(new FSInputStream() {
        public int read() throws IOException {
          return in.read();
        }
        public int read(byte[] b, int off, int len) throws IOException {
          return in.read(b, off, len);
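This HFTP-style snippet, like the SSL variant further down, adapts the HTTP response body to the FSInputStream contract so it can be returned as an FSDataInputStream. An HTTP body can only be read forward, so a typical completion of the pattern delegates the read and close calls and rejects seeks; the sketch below is illustrative and not the truncated file's exact code:

import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSInputStream;

class HttpStreamExample {
  // Illustrative sketch of the wrapping pattern: delegate reads to the HTTP
  // stream and refuse seeks, since the body can only be consumed forward.
  static FSDataInputStream wrap(final InputStream in) throws IOException {
    return new FSDataInputStream(new FSInputStream() {
      public int read() throws IOException { return in.read(); }
      public int read(byte[] b, int off, int len) throws IOException {
        return in.read(b, off, len);
      }
      public void close() throws IOException { in.close(); }
      public void seek(long pos) throws IOException {
        throw new IOException("Can't seek!");
      }
      public long getPos() throws IOException {
        throw new IOException("Position unknown!");
      }
      public boolean seekToNewSource(long targetPos) throws IOException {
        return false; // no alternate replica to fall back to
      }
    });
  }
}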

    return a;
  }
 
  public void testWriteTo() throws IOException, InterruptedException {

    FSInputStream fsin = new MockFSInputStream();
    ByteArrayOutputStream os = new ByteArrayOutputStream();

    // new int[]{s_1, c_1, s_2, c_2, ..., s_n, c_n} means to test
    // reading c_i bytes starting at s_i
    int[] pairs = new int[]{ 0, 10000,
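testWriteTo drives reads of c_i bytes starting at offset s_i, which is what FSInputStream's positioned-read (PositionedReadable) support is for: the stock behaviour is to remember the current offset, seek to the requested position, read, and then seek back. A rough sketch of that seek-and-restore approach as a standalone helper (readAt is hypothetical, not a Hadoop method):

import java.io.IOException;
import org.apache.hadoop.fs.FSInputStream;

class PositionedReadExample {
  // Rough sketch of a positioned read built on seek(), in the style of
  // FSInputStream's default PositionedReadable support: remember the current
  // offset, seek to the requested one, read, then restore the old offset.
  static int readAt(FSInputStream in, long position,
                    byte[] buffer, int offset, int length) throws IOException {
    synchronized (in) {
      long oldPos = in.getPos();
      int nread = -1;
      try {
        in.seek(position);
        nread = in.read(buffer, offset, length);
      } finally {
        in.seek(oldPos); // keep the sequential read position unchanged
      }
      return nread;
    }
  }
}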

    Enumeration<?> e = (new Vector<String>(l)).elements();
    return InclusiveByteRange.satisfiableRanges(e, contentLength);
  }
 
  public void testSendPartialData() throws IOException, InterruptedException {
    FSInputStream in = new MockFSInputStream();
    ByteArrayOutputStream os = new ByteArrayOutputStream();

    // test if multiple ranges, then 416
    {
      List<?> ranges = strToRanges("0-,10-300", 500);

    setupSslProps(conf);
    HttpURLConnection connection = null;
    connection = openConnection(hostname, port, path);
    connection.connect();
    final InputStream in = connection.getInputStream();
    return new FSDataInputStream(new FSInputStream() {
      public int read() throws IOException {
        return in.read();
      }

      public int read(byte[] b, int off, int len) throws IOException {
