Package org.apache.hadoop.fs

Examples of org.apache.hadoop.fs.FSInputStream
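
FSInputStream is Hadoop's abstract base class for filesystem input streams: it extends java.io.InputStream and implements Seekable and PositionedReadable, so a subclass only needs to supply read(), seek(long), getPos(), and seekToNewSource(long); the positioned reads used in the snippets below, such as readFully(long, byte[]), are inherited and implemented on top of seek-and-read. A minimal in-memory sketch (the class name ByteArrayFSInputStream and the byte-array backing are illustrative assumptions, not part of Hadoop):

    import java.io.IOException;
    import org.apache.hadoop.fs.FSInputStream;

    // Illustrative only: an FSInputStream over an in-memory byte array.
    class ByteArrayFSInputStream extends FSInputStream {
      private final byte[] data;
      private int pos;

      ByteArrayFSInputStream(byte[] data) { this.data = data; }

      @Override
      public synchronized void seek(long newPos) throws IOException {
        if (newPos < 0 || newPos > data.length) {
          throw new IOException("cannot seek to " + newPos);
        }
        pos = (int) newPos;
      }

      @Override
      public synchronized long getPos() { return pos; }

      @Override
      public synchronized boolean seekToNewSource(long targetPos) {
        return false; // a single in-memory copy: no alternate replicas
      }

      @Override
      public synchronized int read() {
        return pos < data.length ? (data[pos++] & 0xff) : -1;
      }
    }

With those four methods in place, inherited calls such as in.readFully(0, buf) behave as in the snippets below.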


   
    assertEquals("Block size", Math.min(len, BLOCK_SIZE), s3FileSystem.getBlockSize(path));

    assertEquals("Length", len, s3FileSystem.getLength(path));

    FSInputStream in = s3FileSystem.openRaw(path);
    byte[] buf = new byte[len];

    in.readFully(0, buf);

    assertEquals(len, buf.length);
    for (int i = 0; i < buf.length; i++) {
      assertEquals("Position " + i, data[i], buf[i]);
    }


        // Remember the PRNG's next value, then restart it so it replays
        // the same byte sequence that was used to write the test files.
        long lastLong = randomDataGenerator.nextLong();
        randomDataGenerator = makeRandomDataGenerator(); // restart (make new) PRNG
        ListIterator li = testfilesList.listIterator();
        while (li.hasNext()) {
          testFileName = (UTF8) li.next();
          FSInputStream nis = dfsClient.open(testFileName.toString());
          byte[] bufferGolden = new byte[bufferSize];
          int m = 42; // any value != -1, just to enter the loop
          try {
            while (m != -1) {
              m = nis.read(buffer);
              if (m == buffer.length) {
                // Full buffer: regenerate the expected bytes and compare.
                randomDataGenerator.nextBytes(bufferGolden);
                assertBytesEqual(buffer, bufferGolden, buffer.length);
              } else if (m > 0) {
                // Short read at the end of the file: compare only m bytes.
                byte[] bufferGoldenPartial = new byte[m];
                randomDataGenerator.nextBytes(bufferGoldenPartial);
                assertBytesEqual(buffer, bufferGoldenPartial, bufferGoldenPartial.length);
              }
            }
          } finally {
            nis.close();
          }
        }
        // Verify the PRNG's next value matches the one saved above, which
        // confirms the full length of every file was consumed and checked.
        long lastLongAgain = randomDataGenerator.nextLong();
        assertEquals(lastLong, lastLongAgain);

      if ( isDir ) {
        verifyDir(client, path);
      } else {
        // Not a directory: stream the file and checksum its contents.
        CRC32 fileCRC = new CRC32();
        FSInputStream in = client.open(path);
        try {
          byte[] buf = new byte[4096];
          int nRead = 0;
          while ( (nRead = in.read(buf, 0, buf.length)) > 0 ) {
            fileCRC.update(buf, 0, nRead);
          }
        } finally {
          in.close();
        }
        verifyChecksum(path, fileCRC.getValue());
      }

      if ( isDir ) {
        verifyDir(dfs, path);
      } else {
        // Not a directory: stream the file and checksum its contents.
        CRC32 fileCRC = new CRC32();
        FSInputStream in = dfs.dfs.open(pathName);
        try {
          byte[] buf = new byte[4096];
          int nRead = 0;
          while ( (nRead = in.read(buf, 0, buf.length)) > 0 ) {
            fileCRC.update(buf, 0, nRead);
          }
        } finally {
          in.close();
        }
        verifyChecksum(pathName, fileCRC.getValue());
      }

      response.sendError(400, e.getMessage());
      return;
    }

    // Stream the DFS file to the HTTP client, honoring any Range header.
    long fileLen = dfs.getFileInfo(filename).getLen();
    FSInputStream in = dfs.open(filename);
    OutputStream os = response.getOutputStream();

    try {
      if (reqRanges != null) {
        // Serve only the satisfiable byte ranges of the request.
        List ranges = InclusiveByteRange.satisfiableRanges(reqRanges,
                                                           fileLen);
        StreamFile.sendPartialData(in, os, response, fileLen, ranges);
      } else {
        // No ranges, so send the entire file as a download.
        response.setHeader("Content-Disposition", "attachment; filename=\"" +
                           filename + "\"");
        response.setContentType("application/octet-stream");
        StreamFile.writeTo(in, os, 0L, fileLen);
      }
    } finally {
      in.close();
      os.close();
      dfs.close();
    }
  }
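StreamFile.writeTo above copies a byte range from the FSInputStream to the servlet's output stream. A minimal sketch of such a range copy built on FSInputStream's positioned read (the helper name copyRange and the 4 KB buffer size are assumptions, not the actual StreamFile code):

    // Illustrative sketch: copy count bytes starting at offset start from
    // an FSInputStream to an OutputStream via positioned reads.
    static void copyRange(FSInputStream in, OutputStream os,
                          long start, long count) throws IOException {
      byte[] buf = new byte[4096];
      long pos = start;
      long remaining = count;
      while (remaining > 0) {
        int toRead = (int) Math.min(buf.length, remaining);
        int nRead = in.read(pos, buf, 0, toRead); // leaves the stream cursor untouched
        if (nRead < 0) {
          throw new IOException("unexpected EOF at offset " + pos);
        }
        os.write(buf, 0, nRead);
        pos += nRead;
        remaining -= nRead;
      }
    }

Because the positioned read restores the stream's own cursor, a helper like this can run without disturbing code that tracks the sequential position.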

    return a;
  }
 
  public void testWriteTo() throws IOException, InterruptedException {

    FSInputStream fsin = new MockFSInputStream();
    ByteArrayOutputStream os = new ByteArrayOutputStream();

    // new int[]{s_1, c_1, s_2, c_2, ..., s_n, c_n} means to test
    // reading c_i bytes starting at s_i
    int[] pairs = new int[]{ 0, 10000,

    Enumeration e = (new Vector<String>(l)).elements();
    return InclusiveByteRange.satisfiableRanges(e, contentLength);
  }

  public void testSendPartialData() throws IOException, InterruptedException {
    FSInputStream in = new MockFSInputStream();
    ByteArrayOutputStream os = new ByteArrayOutputStream();

    // A request with multiple ranges should be answered with
    // 416 (Requested Range Not Satisfiable).
    {
      List ranges = strToRanges("0-,10-300", 500);

      if ( isDir ) {
        verifyDir(dfs, path, overallChecksum);
      } else {
        // Not a directory: stream the file and checksum its contents,
        // retrying the open to ride out transient failures.
        CRC32 fileCRC = new CRC32();
        FSInputStream in = dfsOpenFileWithRetries(dfs, pathName);
        try {
          byte[] buf = new byte[4096];
          int nRead = 0;
          while ( (nRead = in.read(buf, 0, buf.length)) > 0 ) {
            fileCRC.update(buf, 0, nRead);
          }
        } finally {
          in.close();
        }
        verifyChecksum(pathName, fileCRC.getValue());
      }

   
    assertEquals("Block size", Math.min(len, BLOCK_SIZE), s3FileSystem.getBlockSize(path));

    assertEquals("Length", len, s3FileSystem.getLength(path));

    FSInputStream in = s3FileSystem.openRaw(path);
    byte[] buf = new byte[len];

    in.readFully(0, buf);

    assertEquals(len, buf.length);
    for (int i = 0; i < buf.length; i++) {
      assertEquals("Position " + i, data[i], buf[i]);
    }
View Full Code Here

    // After a rename, the data must be readable at the new path only.
    Path newPath = new Path("/test/hadoop/newfile");
    s3FileSystem.rename(path, newPath);
    assertFalse("No longer exists", s3FileSystem.exists(path));
    assertTrue("Moved", s3FileSystem.exists(newPath));

    FSInputStream in = s3FileSystem.openRaw(newPath);
    byte[] buf = new byte[len];
    in.readFully(0, buf);   // read len bytes starting at offset 0
    in.close();

    assertEquals(len, buf.length);
    // Contents must survive the rename byte for byte.
    for (int i = 0; i < buf.length; i++) {
      assertEquals("Position " + i, data[i], buf[i]);
    }
