Package org.apache.hadoop.http

Examples of org.apache.hadoop.http.HttpServer$QuotingInputFilter$RequestQuoter
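The snippets on this page are taken from Hadoop sources and tests and mostly show how the enclosing org.apache.hadoop.http.HttpServer is constructed, populated with servlets and filters, and started. RequestQuoter itself is the HttpServletRequestWrapper that HttpServer's built-in QuotingInputFilter applies so that request parameters are HTML-escaped before a servlet sees them. The class below is a minimal illustrative sketch of that wrapper pattern, not the Hadoop implementation: the quote() helper stands in for Hadoop's HtmlQuoting utility, and only the two most commonly used parameter accessors are overridden.

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;

// Sketch of a parameter-quoting request wrapper in the spirit of
// HttpServer$QuotingInputFilter$RequestQuoter. A servlet filter would wrap
// the incoming request in this class before passing it down the chain.
public class QuotingRequestWrapper extends HttpServletRequestWrapper {

  public QuotingRequestWrapper(HttpServletRequest request) {
    super(request);
  }

  // Minimal HTML escaping; Hadoop uses its own HtmlQuoting helper instead.
  private static String quote(String s) {
    if (s == null) {
      return null;
    }
    return s.replace("&", "&amp;").replace("<", "&lt;")
            .replace(">", "&gt;").replace("\"", "&quot;");
  }

  @Override
  public String getParameter(String name) {
    // Escape the raw value so reflected XSS through query parameters
    // is neutralized before the servlet renders it.
    return quote(super.getParameter(name));
  }

  @Override
  public String[] getParameterValues(String name) {
    String[] raw = super.getParameterValues(name);
    if (raw == null) {
      return null;
    }
    String[] quoted = new String[raw.length];
    for (int i = 0; i < raw.length; i++) {
      quoted[i] = quote(raw[i]);
    }
    return quoted;
  }
}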


  }

  public void setUp() throws Exception {
    new File(System.getProperty("build.webapps", "build/webapps") + "/test"
        ).mkdirs();
    server = new HttpServer("test", "0.0.0.0", 0, true);
    server.addServlet("delay", "/delay", DelayServlet.class);
    server.addServlet("jobend", "/jobend", JobEndServlet.class);
    server.addServlet("fail", "/fail", FailServlet.class);
    server.start();
    int port = server.getPort();
View Full Code Here
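The matching tearDown is not part of the snippet above; a minimal sketch (assuming the same server field) simply stops the server so the ephemeral port is released between tests, using the same HttpServer.stop() call seen in the testReadURL example further down:

  public void tearDown() throws Exception {
    if (server != null) {
      server.stop();
    }
  }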


          } else {
            LOG.error("dev mode does NOT work with ephemeral port!");
            System.exit(1);
          }
        }
        HttpServer server =
            new HttpServer(name, bindAddress, port, findPort, conf,
            new AdminACLsManager(conf).getAdminAcl(), null, webapp.getServePathSpecs());
        for(ServletStruct struct: servlets) {
          server.addServlet(struct.name, struct.spec, struct.clazz);
        }
        for(Map.Entry<String, Object> entry : attributes.entrySet()) {
          server.setAttribute(entry.getKey(), entry.getValue());
        }
        server.addGlobalFilter("guice", GuiceFilter.class.getName(), null);
        webapp.setConf(conf);
        webapp.setHttpServer(server);
        server.start();
        LOG.info("Web app /"+ name +" started at "+ server.getPort());
      } catch (ClassNotFoundException e) {
        throw new WebAppException("Error starting http server", e);
      } catch (IOException e) {
        throw new WebAppException("Error starting http server", e);
      }
View Full Code Here
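The builder above wires GuiceFilter in through addGlobalFilter, which applies a filter to every path the server handles. HttpServer uses the same hook for its own parameter-quoting safety filter; assuming the nested QuotingInputFilter class is public (as the class name in this page's title suggests), an equivalent explicit registration would be a one-liner, where "safety" is just an arbitrary filter name:

  server.addGlobalFilter("safety",
      HttpServer.QuotingInputFilter.class.getName(), null);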

    // Initialize other scheduling parameters from the configuration
    checkpointConf = new CheckpointConf(conf);

    // initialize the webserver for uploading files.
    int tmpInfoPort = infoSocAddr.getPort();
    infoServer = new HttpServer("secondary", infoBindAddress, tmpInfoPort,
                                tmpInfoPort == 0, conf,
                                new AccessControlList(conf.get(DFS_ADMIN, " "))) {
      {
        if (UserGroupInformation.isSecurityEnabled()) {
          initSpnego(
View Full Code Here

  private static final byte[] FAKE_LOG_DATA = TestEditLog.HADOOP20_SOME_EDITS;

  @Test
  public void testReadURL() throws Exception {
    // Start a simple web server which hosts the log data.
    HttpServer server = new HttpServer("test", "0.0.0.0", 0, true);
    server.start();
    try {
      server.addServlet("fakeLog", "/fakeLog", FakeLogServlet.class);
      URL url = new URL("http://localhost:" + server.getPort() + "/fakeLog");
      EditLogInputStream elis = EditLogFileInputStream.fromUrl(
          url, HdfsConstants.INVALID_TXID, HdfsConstants.INVALID_TXID,
          false);
      // Read the edit log and verify that we got all of the data.
      EnumMap<FSEditLogOpCodes, Holder<Integer>> counts =
          FSImageTestUtil.countEditLogOpTypes(elis);
      assertThat(counts.get(FSEditLogOpCodes.OP_ADD).held, is(1));
      assertThat(counts.get(FSEditLogOpCodes.OP_SET_GENSTAMP_V1).held, is(1));
      assertThat(counts.get(FSEditLogOpCodes.OP_CLOSE).held, is(1));

      // Check that length header was picked up.
      assertEquals(FAKE_LOG_DATA.length, elis.length());
      elis.close();
    } finally {
      server.stop();
    }
  }
View Full Code Here

    // create a servlet to serve full-file content
    InetSocketAddress infoSocAddr = DataNode.getInfoAddr(conf);
    String infoHost = infoSocAddr.getHostName();
    int tmpInfoPort = infoSocAddr.getPort();
    this.infoServer = (secureResources == null)
       ? new HttpServer("datanode", infoHost, tmpInfoPort, tmpInfoPort == 0,
           conf, new AccessControlList(conf.get(DFS_ADMIN, " ")))
       : new HttpServer("datanode", infoHost, tmpInfoPort, tmpInfoPort == 0,
           conf, new AccessControlList(conf.get(DFS_ADMIN, " ")),
           secureResources.getListener());
    LOG.info("Opened info server at " + infoHost + ":" + tmpInfoPort);
    if (conf.getBoolean(DFS_HTTPS_ENABLE_KEY, false)) {
      boolean needClientAuth = conf.getBoolean(DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
View Full Code Here

 
  public void start() throws IOException {
    final String infoHost = bindAddress.getHostName();
    int infoPort = bindAddress.getPort();

    httpServer = new HttpServer("hdfs", infoHost, infoPort,
                                infoPort == 0, conf,
                                new AccessControlList(conf.get(DFS_ADMIN, " "))) {
      {
        // Add SPNEGO support to NameNode
        if (UserGroupInformation.isSecurityEnabled()) {
View Full Code Here

                                "dfs.secondary.info.port",
                                "dfs.secondary.http.address");
    InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(infoAddr);
    infoBindAddress = infoSocAddr.getHostName();
    int tmpInfoPort = infoSocAddr.getPort();
    infoServer = new HttpServer("secondary", infoBindAddress, tmpInfoPort,
        tmpInfoPort == 0, conf);
    infoServer.setAttribute("name.system.image", checkpointImage);
    this.infoServer.setAttribute("name.conf", conf);
    infoServer.addInternalServlet("getimage", "/getimage", GetImageServlet.class);
    infoServer.start();
View Full Code Here

                              "dfs.datanode.info.port",
                              "dfs.datanode.http.address");
    InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(infoAddr);
    String infoHost = infoSocAddr.getHostName();
    int tmpInfoPort = infoSocAddr.getPort();
    this.infoServer = new HttpServer("datanode", infoHost, tmpInfoPort,
        tmpInfoPort == 0, conf);
    InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(
        conf.get("dfs.datanode.https.address", infoHost + ":" + 0));
    Configuration sslConf = new Configuration(conf);
    sslConf.addResource(conf.get("https.keystore.info.rsrc", "sslinfo.xml"));
View Full Code Here

      NetUtils.getServerAddress(conf, "dfs.info.bindAddress",
                                "dfs.info.port", "dfs.http.address");
    InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(infoAddr);
    String infoHost = infoSocAddr.getHostName();
    int tmpInfoPort = infoSocAddr.getPort();
    this.infoServer = new HttpServer("hdfs", infoHost, tmpInfoPort,
        tmpInfoPort == 0, conf);
    InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(
        conf.get("dfs.https.address", infoHost + ":" + 0));
    Configuration sslConf = new Configuration(conf);
    sslConf.addResource(conf.get("https.keystore.info.rsrc", "sslinfo.xml"));
View Full Code Here

