Package org.apache.hadoop.hdfs.web

Examples of org.apache.hadoop.hdfs.web.WebHdfsFileSystem$OffsetUrlOpener
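
OffsetUrlOpener is an internal helper of WebHdfsFileSystem: it builds the offset-parameterized URL behind the seekable input stream returned by open(), so the examples below never construct it directly and instead exercise it through the public FileSystem API. A minimal, hedged sketch of that entry point (the NameNode address, file path and offset are placeholders, not values taken from the examples):

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class WebHdfsSeekReadSketch {
      public static void main(String[] args) throws Exception {
        // Placeholder NameNode HTTP address; adjust to the cluster under test.
        URI uri = URI.create("webhdfs://localhost:50070");
        FileSystem webhdfs = FileSystem.get(uri, new Configuration());

        // open() returns a seekable stream; seeking and then reading makes the
        // client (re)connect with an offset parameter on the OPEN request,
        // which is the code path that uses OffsetUrlOpener internally.
        try (FSDataInputStream in = webhdfs.open(new Path("/some/file.txt"))) {
          in.seek(128);          // hypothetical offset
          int b = in.read();     // first byte at that offset, or -1 at EOF
          System.out.println("byte at offset 128: " + b);
        }
      }
    }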


    fs.setPermission(file, new FsPermission((short)0644));
    fs.setOwner(file, "root", null);

    setupAuditLogs();

    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
    InputStream istream = webfs.open(file);
    int val = istream.read();
    istream.close();

    verifyAuditLogsRepeat(true, 3);
    assertTrue("failed to read from file", val >= 0);


    fs.setPermission(file, new FsPermission((short)0644));
    fs.setOwner(file, "root", null);

    setupAuditLogs();

    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
    FileStatus st = webfs.getFileStatus(file);

    verifyAuditLogs(true);
    assertTrue("failed to stat file", st != null && st.isFile());
  }

    fs.setPermission(file, new FsPermission((short)0600));
    fs.setOwner(file, "root", null);

    setupAuditLogs();
    try {
      WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
      InputStream istream = webfs.open(file);
      int val = istream.read();
      fail("open+read must not succeed, got " + val);
    } catch (AccessControlException e) {
      System.out.println("got access denied, as expected.");
    }

    fs.setPermission(file, new FsPermission((short)0644));
    fs.setOwner(file, "root", null);

    setupAuditLogs();

    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
    webfs.open(file);

    verifyAuditLogsCheckPattern(true, 3, webOpenPattern);
  }

      int port = viewer.getPort();

      // create a WebHdfsFileSystem instance
      URI uri = new URI("webhdfs://localhost:" + String.valueOf(port));
      Configuration conf = new Configuration();
      WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)FileSystem.get(uri, conf);

      // verify the number of directories
      FileStatus[] statuses = webhdfs.listStatus(new Path("/"));
      assertEquals(NUM_DIRS + 2, statuses.length); // the extra 2 entries are the empty directory and the xattr directory

      // verify the number of files in the directory
      statuses = webhdfs.listStatus(new Path("/dir0"));
      assertEquals(FILES_PER_DIR, statuses.length);

      // compare a file
      FileStatus status = webhdfs.listStatus(new Path("/dir0/file0"))[0];
      FileStatus expected = writtenFiles.get("/dir0/file0");
      compareFile(expected, status);

      // LISTSTATUS operation to an empty directory
      statuses = webhdfs.listStatus(new Path("/emptydir"));
      assertEquals(0, statuses.length);

      // LISTSTATUS operation to an invalid path
      URL url = new URL("http://localhost:" + port +
                    "/webhdfs/v1/invalid/?op=LISTSTATUS");
      verifyHttpResponseCode(HttpURLConnection.HTTP_NOT_FOUND, url);

      // LISTSTATUS operation to an invalid prefix
      url = new URL("http://localhost:" + port + "/webhdfs/v1?op=LISTSTATUS");
      verifyHttpResponseCode(HttpURLConnection.HTTP_NOT_FOUND, url);

      // GETFILESTATUS operation
      status = webhdfs.getFileStatus(new Path("/dir0/file0"));
      compareFile(expected, status);

      // GETFILESTATUS operation to an invalid path
      url = new URL("http://localhost:" + port +
                    "/webhdfs/v1/invalid/?op=GETFILESTATUS");

    final String uri = WebHdfsFileSystem.SCHEME  + "://"
        + config.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
    // get the file system as the JobTracker user
    final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
        "JobTracker", new String[]{"user"});
    final WebHdfsFileSystem webhdfs = ugi.doAs(
        new PrivilegedExceptionAction<WebHdfsFileSystem>() {
      @Override
      public WebHdfsFileSystem run() throws Exception {
        return (WebHdfsFileSystem)FileSystem.get(new URI(uri), config);
      }
    });

    { //test addDelegationTokens(..)
      Credentials creds = new Credentials();
      final Token<?> tokens[] = webhdfs.addDelegationTokens("JobTracker", creds);
      Assert.assertEquals(1, tokens.length);
      Assert.assertEquals(1, creds.numberOfTokens());
      Assert.assertSame(tokens[0], creds.getAllTokens().iterator().next());
      checkTokenIdentifier(ugi, tokens[0]);
      final Token<?> tokens2[] = webhdfs.addDelegationTokens("JobTracker", creds);
      Assert.assertEquals(0, tokens2.length);
    }
  }
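
A hedged follow-up to the addDelegationTokens(..) block above: once a token has been fetched, the generic Token API can renew and cancel it. The helper below is illustrative only (the class and method names are not from the test) and omits the secure-cluster setup the real test relies on:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.security.token.Token;

    final class TokenLifecycleSketch {
      /** Fetch a delegation token for the given renewer, renew it once, then cancel it. */
      static void fetchRenewCancel(WebHdfsFileSystem webhdfs, Configuration conf,
          String renewer) throws Exception {
        Credentials creds = new Credentials();
        Token<?>[] tokens = webhdfs.addDelegationTokens(renewer, creds);
        if (tokens.length > 0) {
          Token<?> token = tokens[0];
          long nextExpiry = token.renew(conf);   // new expiry time, in milliseconds
          System.out.println("token renewed, expires at " + nextExpiry);
          token.cancel(conf);                    // invalidate the token server-side
        }
      }
    }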

      int port = viewer.getPort();

      // create a WebHdfsFileSystem instance
      URI uri = new URI("webhdfs://localhost:" + String.valueOf(port));
      Configuration conf = new Configuration();
      WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)FileSystem.get(uri, conf);

      // GETACLSTATUS operation to a directory without ACL
      AclStatus acl = webhdfs.getAclStatus(new Path("/dirWithNoAcl"));
      assertEquals(writtenAcls.get("/dirWithNoAcl"), acl);

      // GETACLSTATUS operation to a directory with a default ACL
      acl = webhdfs.getAclStatus(new Path("/dirWithDefaultAcl"));
      assertEquals(writtenAcls.get("/dirWithDefaultAcl"), acl);

      // GETACLSTATUS operation to a file without ACL
      acl = webhdfs.getAclStatus(new Path("/noAcl"));
      assertEquals(writtenAcls.get("/noAcl"), acl);

      // GETACLSTATUS operation to a file with an ACL
      acl = webhdfs.getAclStatus(new Path("/withAcl"));
      assertEquals(writtenAcls.get("/withAcl"), acl);

      // GETACLSTATUS operation to a file with several ACL entries
      acl = webhdfs.getAclStatus(new Path("/withSeveralAcls"));
      assertEquals(writtenAcls.get("/withSeveralAcls"), acl);

      // GETACLSTATUS operation to an invalid path
      URL url = new URL("http://localhost:" + port +
          "/webhdfs/v1/invalid/?op=GETACLSTATUS");

 
  @Test(timeout=5000)
  public void testWebHdfsDoAs() throws Exception {
    WebHdfsTestUtil.LOG.info("START: testWebHdfsDoAs()");
    WebHdfsTestUtil.LOG.info("ugi.getShortUserName()=" + ugi.getShortUserName());
    final WebHdfsFileSystem webhdfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, config, WebHdfsFileSystem.SCHEME);
   
    final Path root = new Path("/");
    cluster.getFileSystem().setPermission(root, new FsPermission((short)0777));

    Whitebox.setInternalState(webhdfs, "ugi", proxyUgi);

    {
      Path responsePath = webhdfs.getHomeDirectory();
      WebHdfsTestUtil.LOG.info("responsePath=" + responsePath);
      Assert.assertEquals(webhdfs.getUri() + "/user/" + PROXY_USER, responsePath.toString());
    }

    final Path f = new Path("/testWebHdfsDoAs/a.txt");
    {
      FSDataOutputStream out = webhdfs.create(f);
      out.write("Hello, webhdfs user!".getBytes());
      out.close();
 
      final FileStatus status = webhdfs.getFileStatus(f);
      WebHdfsTestUtil.LOG.info("status.getOwner()=" + status.getOwner());
      Assert.assertEquals(PROXY_USER, status.getOwner());
    }

    {
      final FSDataOutputStream out = webhdfs.append(f);
      out.write("\nHello again!".getBytes());
      out.close();
 
      final FileStatus status = webhdfs.getFileStatus(f);
      WebHdfsTestUtil.LOG.info("status.getOwner()=" + status.getOwner());
      WebHdfsTestUtil.LOG.info("status.getLen()  =" + status.getLen());
      Assert.assertEquals(PROXY_USER, status.getOwner());
    }
  }
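
The test above injects proxyUgi with Whitebox, which is a test-only shortcut. Outside of tests, the usual way to get the same doas behaviour is to wrap the FileSystem lookup in a proxy UGI, as in this hedged sketch (names are illustrative, and the real user must also be authorized via the hadoop.proxyuser.* settings on the NameNode):

    import java.net.URI;
    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.security.UserGroupInformation;

    final class ProxyUserSketch {
      /** Returns a FileSystem whose WebHDFS requests are issued on behalf of proxyUser. */
      static FileSystem getAsProxyUser(final URI webhdfsUri, final Configuration conf,
          String proxyUser) throws Exception {
        UserGroupInformation realUser = UserGroupInformation.getCurrentUser();
        UserGroupInformation proxyUgi =
            UserGroupInformation.createProxyUser(proxyUser, realUser);
        return proxyUgi.doAs(new PrivilegedExceptionAction<FileSystem>() {
          @Override
          public FileSystem run() throws Exception {
            return FileSystem.get(webhdfsUri, conf);
          }
        });
      }
    }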

  }
 
  @Test
  public void testWebHdfsCustomDefaultPorts() throws IOException {
    URI uri = URI.create("webhdfs://localhost");
    WebHdfsFileSystem fs = (WebHdfsFileSystem) FileSystem.get(uri, conf);

    assertEquals(123, fs.getDefaultPort());
    assertEquals(uri, fs.getUri());
    assertEquals("127.0.0.1:123", fs.getCanonicalServiceName());
  }

  }

  @Test
  public void testWebHdfsCustomUriPortWithCustomDefaultPorts() throws IOException {
    URI uri = URI.create("webhdfs://localhost:789");
    WebHdfsFileSystem fs = (WebHdfsFileSystem) FileSystem.get(uri, conf);

    assertEquals(123, fs.getDefaultPort());
    assertEquals(uri, fs.getUri());
    assertEquals("127.0.0.1:789", fs.getCanonicalServiceName());
  }
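
Note on the two port tests: getCanonicalServiceName() resolves the host and falls back to the configured default port (123 here) only when the URI itself carries no port, which is why the first test reports 127.0.0.1:123 while the explicit port 789 is preserved in the second.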