Package org.apache.hadoop.hdfs.web

Examples of org.apache.hadoop.hdfs.web.WebHdfsFileSystem$OffsetUrlInputStream
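
For orientation, here is a minimal, self-contained sketch (not taken from the snippets below) of where OffsetUrlInputStream typically appears: it is the internal stream class backing the FSDataInputStream returned by WebHdfsFileSystem.open(..), and it serves seeks by re-issuing the HTTP OPEN request at the requested byte offset. The NameNode HTTP address (localhost:50070) and the file path are placeholders; the sketch assumes a running cluster with WebHDFS enabled.

    import java.io.IOException;
    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;

    public class WebHdfsOffsetReadExample {
      public static void main(String[] args) throws IOException {
        // Placeholder NameNode HTTP address; adjust to the cluster under test.
        final URI uri = URI.create(WebHdfsFileSystem.SCHEME + "://localhost:50070");
        final FileSystem fs = FileSystem.get(uri, new Configuration());

        // open(..) returns an FSDataInputStream; for WebHdfsFileSystem it is backed
        // by the internal OffsetUrlInputStream, so seek(..) below is translated into
        // an offset on the next OPEN request rather than a local buffer skip.
        final Path file = new Path("/tmp/example.txt");  // placeholder path
        try (FSDataInputStream in = fs.open(file)) {
          in.seek(10);
          final byte[] buf = new byte[64];
          final int n = in.read(buf);
          System.out.println("read " + n + " bytes starting at offset 10");
        }
      }
    }

The snippets below, collected from the Hadoop test suite, exercise the same file system through its delegation-token, proxy-user (doAs) and audit-logging paths.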


    final String uri = WebHdfsFileSystem.SCHEME + "://"
        + config.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
    //get file system as JobTracker
    final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
        "JobTracker", new String[]{"user"});
    final WebHdfsFileSystem webhdfs = ugi.doAs(
        new PrivilegedExceptionAction<WebHdfsFileSystem>() {
      @Override
      public WebHdfsFileSystem run() throws Exception {
        return (WebHdfsFileSystem)FileSystem.get(new URI(uri), config);
      }
    });

    { //test getDelegationToken(..)
      final Token<DelegationTokenIdentifier> token = webhdfs
          .getDelegationToken("JobTracker");
      DelegationTokenIdentifier identifier = new DelegationTokenIdentifier();
      byte[] tokenId = token.getIdentifier();
      identifier.readFields(new DataInputStream(new ByteArrayInputStream(tokenId)));
      LOG.info("A valid token should have non-null password, and should be renewed successfully");
      Assert.assertTrue(null != dtSecretManager.retrievePassword(identifier));
      dtSecretManager.renewToken(token, "JobTracker");
      ugi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          token.renew(config);
          token.cancel(config);
          return null;
        }
      });
    }

    { //test getDelegationTokens(..)
      final List<Token<?>> tokenlist = webhdfs.getDelegationTokens("JobTracker");
      DelegationTokenIdentifier identifier = new DelegationTokenIdentifier();
      @SuppressWarnings("unchecked")
      final Token<DelegationTokenIdentifier> token = (Token<DelegationTokenIdentifier>)tokenlist.get(0);
      byte[] tokenId = token.getIdentifier();
      identifier.readFields(new DataInputStream(new ByteArrayInputStream(tokenId)));
View Full Code Here


    final String uri = WebHdfsFileSystem.SCHEME + "://"
        + config.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
    //get file system as JobTracker
    final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
        "JobTracker", new String[]{"user"});
    final WebHdfsFileSystem webhdfs = ugi.doAs(
        new PrivilegedExceptionAction<WebHdfsFileSystem>() {
      @Override
      public WebHdfsFileSystem run() throws Exception {
        return (WebHdfsFileSystem)FileSystem.get(new URI(uri), config);
      }
    });

    final Token<DelegationTokenIdentifier> token = webhdfs.getDelegationToken("JobTracker");
    DelegationTokenIdentifier identifier = new DelegationTokenIdentifier();
    byte[] tokenId = token.getIdentifier();
    identifier.readFields(new DataInputStream(new ByteArrayInputStream(tokenId)));
    LOG.info("A valid token should have non-null password, and should be renewed successfully");
    Assert.assertTrue(null != dtSecretManager.retrievePassword(identifier));
View Full Code Here

    WebHdfsTestUtil.LOG.info("START: testWebHdfsDoAs()");
    ((Log4JLogger)NamenodeWebHdfsMethods.LOG).getLogger().setLevel(Level.ALL);
    ((Log4JLogger)ExceptionHandler.LOG).getLogger().setLevel(Level.ALL);
    final UserGroupInformation ugi = UserGroupInformation.createRemoteUser(REAL_USER);
    WebHdfsTestUtil.LOG.info("ugi.getShortUserName()=" + ugi.getShortUserName());
    final WebHdfsFileSystem webhdfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, config);
   
    final Path root = new Path("/");
    cluster.getFileSystem().setPermission(root, new FsPermission((short)0777));

    {
      //test GETHOMEDIRECTORY with doAs
      final URL url = WebHdfsTestUtil.toUrl(webhdfs,
          GetOpParam.Op.GETHOMEDIRECTORY, root, new DoAsParam(PROXY_USER));
      final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
      final Map<?, ?> m = WebHdfsTestUtil.connectAndGetJson(conn, HttpServletResponse.SC_OK);
      conn.disconnect();
 
      final Object responsePath = m.get(Path.class.getSimpleName());
      WebHdfsTestUtil.LOG.info("responsePath=" + responsePath);
      Assert.assertEquals("/user/" + PROXY_USER, responsePath);
    }

    {
      //test GETHOMEDIRECTORY with "DOas": the parameter name is sent in mixed
      //case and should still be accepted by the server
      final URL url = WebHdfsTestUtil.toUrl(webhdfs,
          GetOpParam.Op.GETHOMEDIRECTORY, root, new DoAsParam(PROXY_USER) {
            @Override
            public String getName() {
              return "DOas";
            }
      });
      final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
      final Map<?, ?> m = WebHdfsTestUtil.connectAndGetJson(conn, HttpServletResponse.SC_OK);
      conn.disconnect();
 
      final Object responsePath = m.get(Path.class.getSimpleName());
      WebHdfsTestUtil.LOG.info("responsePath=" + responsePath);
      Assert.assertEquals("/user/" + PROXY_USER, responsePath);
    }

    final Path f = new Path("/testWebHdfsDoAs/a.txt");
    {
      //test create file with doAs
      final PutOpParam.Op op = PutOpParam.Op.CREATE;
      final URL url = WebHdfsTestUtil.toUrl(webhdfs, op, f, new DoAsParam(PROXY_USER));
      WebHdfsTestUtil.LOG.info("url=" + url);
      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
      conn = WebHdfsTestUtil.twoStepWrite(conn, op);
      final FSDataOutputStream out = WebHdfsTestUtil.write(webhdfs, op, conn, 4096);
      out.write("Hello, webhdfs user!".getBytes());
      out.close();
 
      final FileStatus status = webhdfs.getFileStatus(f);
      WebHdfsTestUtil.LOG.info("status.getOwner()=" + status.getOwner());
      Assert.assertEquals(PROXY_USER, status.getOwner());
    }

    {
      //test append file with doAs
      final PostOpParam.Op op = PostOpParam.Op.APPEND;
      final URL url = WebHdfsTestUtil.toUrl(webhdfs, op, f, new DoAsParam(PROXY_USER));
      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
      conn = WebHdfsTestUtil.twoStepWrite(conn, op);
      final FSDataOutputStream out = WebHdfsTestUtil.write(webhdfs, op, conn, 4096);
      out.write("\nHello again!".getBytes());
      out.close();
 
      final FileStatus status = webhdfs.getFileStatus(f);
      WebHdfsTestUtil.LOG.info("status.getOwner()=" + status.getOwner());
      WebHdfsTestUtil.LOG.info("status.getLen()  =" + status.getLen());
      Assert.assertEquals(PROXY_USER, status.getOwner());
    }
  }
View Full Code Here

    fs.setPermission(file, new FsPermission((short)0644));
    fs.setOwner(file, "root", null);

    setupAuditLogs();

    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
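    // Open the file over WebHDFS and read a byte: with 0644 permissions the read
    // should succeed, and the accesses should appear in the audit log.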
    InputStream istream = webfs.open(file);
    int val = istream.read();
    istream.close();

    verifyAuditLogsRepeat(true, 3);
    assertTrue("failed to read from file", val >= 0);
View Full Code Here

    fs.setPermission(file, new FsPermission((short)0644));
    fs.setOwner(file, "root", null);

    setupAuditLogs();

    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
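    // Stat the file over WebHDFS; getFileStatus should succeed and be audit-logged.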
    FileStatus st = webfs.getFileStatus(file);

    verifyAuditLogs(true);
    assertTrue("failed to stat file", st != null && st.isFile());
  }
View Full Code Here

    fs.setPermission(file, new FsPermission((short)0600));
    fs.setOwner(file, "root", null);

    setupAuditLogs();
    try {
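      // The file is owned by root with 0600 permissions, so opening it over WebHDFS
      // as this user must fail with an AccessControlException.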
      WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
      InputStream istream = webfs.open(file);
      int val = istream.read();
      fail("open+read must not succeed, got " + val);
    } catch (AccessControlException e) {
      System.out.println("got access denied, as expected.");
    }
View Full Code Here

    fs.setPermission(file, new FsPermission((short)0644));
    fs.setOwner(file, "root", null);

    setupAuditLogs();

    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf);
    InputStream istream = webfs.open(file);
    int val = istream.read();
    istream.close();

    verifyAuditLogsRepeat(true, 3);
    assertTrue("failed to read from file", val >= 0);
View Full Code Here

    fs.setPermission(file, new FsPermission((short)0644));
    fs.setOwner(file, "root", null);

    setupAuditLogs();

    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf);
    FileStatus st = webfs.getFileStatus(file);

    verifyAuditLogs(true);
    assertTrue("failed to stat file", st != null && st.isFile());
  }
View Full Code Here

    fs.setPermission(file, new FsPermission((short)0600));
    fs.setOwner(file, "root", null);

    setupAuditLogs();
    try {
      WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf);
      InputStream istream = webfs.open(file);
      int val = istream.read();
      fail("open+read must not succeed, got " + val);
    } catch (AccessControlException e) {
      System.out.println("got access denied, as expected.");
    }
View Full Code Here

    final String uri = WebHdfsFileSystem.SCHEME + "://"
        + config.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
    //get file system as JobTracker
    final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
        "JobTracker", new String[]{"user"});
    final WebHdfsFileSystem webhdfs = ugi.doAs(
        new PrivilegedExceptionAction<WebHdfsFileSystem>() {
      @Override
      public WebHdfsFileSystem run() throws Exception {
        return (WebHdfsFileSystem)FileSystem.get(new URI(uri), config);
      }
    });

    { //test addDelegationTokens(..)
      Credentials creds = new Credentials();
      final Token<?> tokens[] = webhdfs.addDelegationTokens("JobTracker", creds);
      Assert.assertEquals(1, tokens.length);
      Assert.assertEquals(1, creds.numberOfTokens());
      Assert.assertSame(tokens[0], creds.getAllTokens().iterator().next());
      checkTokenIdentifier(ugi, tokens[0]);
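      // A second call should add no new tokens, since the credentials already
      // contain the delegation token obtained above.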
      final Token<?> tokens2[] = webhdfs.addDelegationTokens("JobTracker", creds);
      Assert.assertEquals(0, tokens2.length);
    }
  }
View Full Code Here
