Package org.sonatype.tests.http.server.fluent

Examples of org.sonatype.tests.http.server.fluent.Server
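
All of the examples below share the same lifecycle: build a Server with the fluent builder, start it, read its URL or port, exercise it, and stop it in a finally block. The following is a minimal sketch of that pattern, restricted to calls that appear in the snippets on this page (withPort, serve, withBehaviours, Behaviours.content, start, getUrl, stop); the class name, the "/hello" path spec, the println and the exact Behaviours import location are illustrative assumptions, not part of the snippets.

// Minimal, self-contained sketch of the fluent lifecycle (illustrative only)
import org.sonatype.tests.http.server.fluent.Behaviours; // assumed to live next to Server in the fluent package
import org.sonatype.tests.http.server.fluent.Server;

public class ServerFluentSketch
{
  public static void main(String[] args) throws Exception {
    final Server server = Server
        .withPort(0)                     // 0 = bind any free port, as the examples below do
        .serve("/hello")                 // path spec; most examples below use "/*"
        .withBehaviours(Behaviours.content("<html></html>"))
        .start();
    try {
      // getUrl() (and getPort()) are available once the server has been started
      System.out.println("Serving at " + server.getUrl().toExternalForm() + "/hello");
    }
    finally {
      // always stop the server, typically from a finally block
      server.stop();
    }
  }
}

Passing 0 to withPort lets the harness bind an ephemeral port, which is why the snippets read the actual port or URL back from the started server.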


    // bring up remote server using Jetty
    final PathRecorder recorder = new PathRecorder();
    final PrefixesFile prefixesFile = new PrefixesFile();
    prefixesFile.setContent(null);

    final Server server = Server
        .withPort(0)
        .serve("/*")
        .withBehaviours(
            recorder,
            prefixesFile,
            Behaviours.get(remoteRepoRoot)
        )
        .start();

    // create the proxy
    final MavenProxyRepository proxyRepository = repositories()
        .create(MavenProxyRepository.class, repositoryIdForTest("someorgProxy1"))
        .asProxyOf(server.getUrl().toExternalForm())
        .doNotDownloadRemoteIndexes()
        .save();

    routingTest().waitForAllRoutingUpdateJobToStop();
    // waitForWLPublishingOutcomes( proxyRepository.id() );
    client().getSubsystem(Scheduler.class).waitForAllTasksToStop();

    // nuke the repo cache
    nukeProxyCaches(proxyRepository.id());

    try {
      // clear recorder
      recorder.clear();
      // remote repo lives without prefix file
      {
        // check that newly added proxy is not publishing prefix file
        assertThat(routing().getStatus(proxyRepository.id()).getPublishedStatus(),
            equalTo(Outcome.FAILED));

        // and because no WL, we can fetch whatever we want (com and org)
        // all these will go remotely
        fetchAndAssert(downloadsDir, proxyRepository.id(), COM_SOMEORG_ARTIFACT_10_POM, true);
        fetchAndAssert(downloadsDir, proxyRepository.id(), COM_SOMEORG_ARTIFACT_10_JAR, true);
        fetchAndAssert(downloadsDir, proxyRepository.id(), ORG_SOMEORG_ARTIFACT_10_POM, true);
        fetchAndAssert(downloadsDir, proxyRepository.id(), ORG_SOMEORG_ARTIFACT_10_JAR, true);
        fetchAndAssert(downloadsDir, proxyRepository.id(), FLUKE_ARTIFACT_POM, false);
        fetchAndAssert(downloadsDir, proxyRepository.id(), FLUKE_ARTIFACT_JAR, false);

        // note: the .sha1 is requested only for files that actually exist
        // GET /hu/fluke/artifact/1.0/artifact-1.0.jar,
        // GET /hu/fluke/artifact/1.0/artifact-1.0.pom,
        // GET /org/someorg/artifact/1.0/artifact-1.0.jar.sha1,
        // GET /org/someorg/artifact/1.0/artifact-1.0.jar,
        // GET /org/someorg/artifact/1.0/artifact-1.0.pom.sha1,
        // GET /org/someorg/artifact/1.0/artifact-1.0.pom,
        // GET /com/someorg/artifact/1.0/artifact-1.0.jar.sha1,
        // GET /com/someorg/artifact/1.0/artifact-1.0.jar,
        // GET /com/someorg/artifact/1.0/artifact-1.0.pom.sha1,
        // GET /com/someorg/artifact/1.0/artifact-1.0.pom,
        final List<String> requests = recorder.getPathsForVerb("GET");
        assertThat(requests.size(), is(10));
        assertThat(
            requests,
            containsInAnyOrder(COM_SOMEORG_ARTIFACT_10_POM, COM_SOMEORG_ARTIFACT_10_POM + ".sha1",
                COM_SOMEORG_ARTIFACT_10_JAR, COM_SOMEORG_ARTIFACT_10_JAR + ".sha1",
                ORG_SOMEORG_ARTIFACT_10_POM, ORG_SOMEORG_ARTIFACT_10_POM + ".sha1",
                ORG_SOMEORG_ARTIFACT_10_JAR, ORG_SOMEORG_ARTIFACT_10_JAR + ".sha1", FLUKE_ARTIFACT_POM,
                FLUKE_ARTIFACT_JAR));
      }

      // nuke the repo cache
      nukeProxyCaches(proxyRepository.id());

      // now set the prefixes file that contains /org/someorg prefix only, and repeat
      prefixesFile.setContent(Files.toString(testData().resolveFile("someorg-prefixes.txt"),
          Charset.forName("UTF-8")));

      // update the WL of proxy repo to have new prefixes file picked up
      routing().updatePrefixFile(proxyRepository.id());

      // wait for the update to finish: it is an async operation, so the client call above returned
      // immediately while the update runs in a separate thread. Still, this should be quick, as the prefix file is used
      Status proxyStatus = routing().getStatus(proxyRepository.id());
      // sit and wait for remote discovery (or the timeout JUnit @Rule will kill us)
      while (proxyStatus.getPublishedStatus() != Outcome.SUCCEEDED) {
        Thread.sleep(10000);
        proxyStatus = routing().getStatus(proxyRepository.id());
      }

      // nuke the repo cache
      nukeProxyCaches(proxyRepository.id());

      // clear recorder
      recorder.clear();
      // repeat the test with slightly different expectations
      {
        // check that newly added proxy is publishing prefix file
        assertThat(routing().getStatus(proxyRepository.id()).getPublishedStatus(),
            equalTo(Outcome.SUCCEEDED));

        // and because we have a WL, we can't fetch whatever we want (com and org);
        // only the WL-enlisted ones will go remotely
        fetchAndAssert(downloadsDir, proxyRepository.id(), COM_SOMEORG_ARTIFACT_10_POM, false);
        fetchAndAssert(downloadsDir, proxyRepository.id(), COM_SOMEORG_ARTIFACT_10_JAR, false);
        fetchAndAssert(downloadsDir, proxyRepository.id(), ORG_SOMEORG_ARTIFACT_10_POM, true);
        fetchAndAssert(downloadsDir, proxyRepository.id(), ORG_SOMEORG_ARTIFACT_10_JAR, true);
        fetchAndAssert(downloadsDir, proxyRepository.id(), FLUKE_ARTIFACT_POM, false);
        fetchAndAssert(downloadsDir, proxyRepository.id(), FLUKE_ARTIFACT_JAR, false);

        // GET /org/someorg/artifact/1.0/artifact-1.0.jar.sha1,
        // GET /org/someorg/artifact/1.0/artifact-1.0.jar,
        // GET /org/someorg/artifact/1.0/artifact-1.0.pom.sha1,
        // GET /org/someorg/artifact/1.0/artifact-1.0.pom,
        final List<String> requests = recorder.getPathsForVerb("GET");
        assertThat(requests.size(), is(4));
        assertThat(
            requests,
            containsInAnyOrder(ORG_SOMEORG_ARTIFACT_10_POM, ORG_SOMEORG_ARTIFACT_10_POM + ".sha1",
                ORG_SOMEORG_ARTIFACT_10_JAR, ORG_SOMEORG_ARTIFACT_10_JAR + ".sha1"));
      }
    }
    finally {
      server.stop();
    }
  }


    final PathRecorder recorder = new PathRecorder();
    final PrefixesFile prefixesFile = new PrefixesFile();
    // now set the prefixes file that contains /org/someorg prefix only, and repeat
    prefixesFile.setContent(Files.toString(testData().resolveFile("someorg-prefixes.txt"),
        Charset.forName("UTF-8")));
    final Server server = Server
        .withPort(0)
        .serve("/*")
        .withBehaviours(
            recorder,
            prefixesFile,
            Behaviours.get(remoteRepoRoot)
        )
        .start();

    // create the proxy
    final MavenProxyRepository proxyRepository =
        repositories().create(MavenProxyRepository.class, repositoryIdForTest("someorgProxy1"))
            .asProxyOf(server.getUrl().toExternalForm())
            .doNotDownloadRemoteIndexes()
            .save();

    routingTest().waitForAllRoutingUpdateJobToStop();
    // waitForWLPublishingOutcomes( proxyRepository.id() );
    client().getSubsystem(Scheduler.class).waitForAllTasksToStop();

    // nuke the repo cache
    nukeProxyCaches(proxyRepository.id());

    try {
      // clear recorder
      recorder.clear();
      // repeat the test with slightly different expectations
      {
        // check that newly added proxy is publishing prefix file
        assertThat(routing().getStatus(proxyRepository.id()).getPublishedStatus(),
            equalTo(Outcome.SUCCEEDED));

        // and because we have a WL, we can't fetch whatever we want (com and org);
        // only the WL-enlisted ones will go remotely
        fetchAndAssert(downloadsDir, proxyRepository.id(), COM_SOMEORG_ARTIFACT_10_POM, false);
        fetchAndAssert(downloadsDir, proxyRepository.id(), COM_SOMEORG_ARTIFACT_10_JAR, false);
        fetchAndAssert(downloadsDir, proxyRepository.id(), ORG_SOMEORG_ARTIFACT_10_POM, true);
        fetchAndAssert(downloadsDir, proxyRepository.id(), ORG_SOMEORG_ARTIFACT_10_JAR, true);
        fetchAndAssert(downloadsDir, proxyRepository.id(), FLUKE_ARTIFACT_POM, false);
        fetchAndAssert(downloadsDir, proxyRepository.id(), FLUKE_ARTIFACT_JAR, false);

        // GET /org/someorg/artifact/1.0/artifact-1.0.jar.sha1,
        // GET /org/someorg/artifact/1.0/artifact-1.0.jar,
        // GET /org/someorg/artifact/1.0/artifact-1.0.pom.sha1,
        // GET /org/someorg/artifact/1.0/artifact-1.0.pom,
        final List<String> requests = recorder.getPathsForVerb("GET");
        assertThat(requests.size(), is(4));
        assertThat(
            requests,
            containsInAnyOrder(ORG_SOMEORG_ARTIFACT_10_POM, ORG_SOMEORG_ARTIFACT_10_POM + ".sha1",
                ORG_SOMEORG_ARTIFACT_10_JAR, ORG_SOMEORG_ARTIFACT_10_JAR + ".sha1"));
      }

      // now lose the prefixes file
      prefixesFile.setContent(null);

      // update the WL of proxy repo to have new prefixes file picked up
      routing().updatePrefixFile(proxyRepository.id());

      // wait for the update to finish: it is an async operation, so the client call above returned
      // immediately while the update runs in a separate thread. Still, this should be quick, as the prefix file is used
      Status proxyStatus = routing().getStatus(proxyRepository.id());
      // sit and wait for remote discovery (or the timeout JUnit @Rule will kill us)
      while (proxyStatus.getPublishedStatus() != Outcome.FAILED) {
        Thread.sleep(10000);
        proxyStatus = routing().getStatus(proxyRepository.id());
      }

      // nuke the repo cache
      nukeProxyCaches(proxyRepository.id());

      // clear recorder
      recorder.clear();
      // remote repo lives without prefix file
      {
        // check that newly added proxy is not publishing prefix file
        assertThat(routing().getStatus(proxyRepository.id()).getPublishedStatus(),
            equalTo(Outcome.FAILED));

        // and because no WL, we can fetch whatever we want (com and org)
        // all these will go remotely
        fetchAndAssert(downloadsDir, proxyRepository.id(), COM_SOMEORG_ARTIFACT_10_POM, true);
        fetchAndAssert(downloadsDir, proxyRepository.id(), COM_SOMEORG_ARTIFACT_10_JAR, true);
        fetchAndAssert(downloadsDir, proxyRepository.id(), ORG_SOMEORG_ARTIFACT_10_POM, true);
        fetchAndAssert(downloadsDir, proxyRepository.id(), ORG_SOMEORG_ARTIFACT_10_JAR, true);
        fetchAndAssert(downloadsDir, proxyRepository.id(), FLUKE_ARTIFACT_POM, false);
        fetchAndAssert(downloadsDir, proxyRepository.id(), FLUKE_ARTIFACT_JAR, false);

        // note: the .sha1 is requested only for files that actually exist
        // GET /hu/fluke/artifact/1.0/artifact-1.0.jar,
        // GET /hu/fluke/artifact/1.0/artifact-1.0.pom,
        // GET /org/someorg/artifact/1.0/artifact-1.0.jar.sha1,
        // GET /org/someorg/artifact/1.0/artifact-1.0.jar,
        // GET /org/someorg/artifact/1.0/artifact-1.0.pom.sha1,
        // GET /org/someorg/artifact/1.0/artifact-1.0.pom,
        // GET /com/someorg/artifact/1.0/artifact-1.0.jar.sha1,
        // GET /com/someorg/artifact/1.0/artifact-1.0.jar,
        // GET /com/someorg/artifact/1.0/artifact-1.0.pom.sha1,
        // GET /com/someorg/artifact/1.0/artifact-1.0.pom,
        final List<String> requests = recorder.getPathsForVerb("GET");
        log("Requests: {}", requests);

        assertThat(requests.size(), is(10));
        assertThat(
            requests,
            containsInAnyOrder(COM_SOMEORG_ARTIFACT_10_POM, COM_SOMEORG_ARTIFACT_10_POM + ".sha1",
                COM_SOMEORG_ARTIFACT_10_JAR, COM_SOMEORG_ARTIFACT_10_JAR + ".sha1",
                ORG_SOMEORG_ARTIFACT_10_POM, ORG_SOMEORG_ARTIFACT_10_POM + ".sha1",
                ORG_SOMEORG_ARTIFACT_10_JAR, ORG_SOMEORG_ARTIFACT_10_JAR + ".sha1", FLUKE_ARTIFACT_POM,
                FLUKE_ARTIFACT_JAR));
      }
    }
    finally {
      server.stop();
    }
  }

      throws Exception
  {
    final int port = ((M2TestsuiteEnvironmentBuilder)environmentBuilder()).server().getPort();
    environmentBuilder().stopService();

    final Server server = Server.withPort(port);
    server.serve("/*").withBehaviours(new DropConnection()).start();
    try {
      final Repository repository = getRepositoryRegistry().getRepository("repo1");
      final ResourceStoreRequest request =
          new ResourceStoreRequest("/activemq/activemq-core/1.2/activemq-core-1.2.jar");

      try {
        final StorageItem item = repository.retrieveItem(request);
        fail("We expected a LocalStorageEOFException to be thrown");
      }
      catch (LocalStorageEOFException e) {
        // good, we expected this
      }
      finally {
        // now we have to ensure no remnant files exist
        assertThat(repository.getLocalStorage().containsItem(repository, request), is(false));
        // no tmp files should exist either
        assertThat(
            repository.getLocalStorage().listItems(repository, new ResourceStoreRequest("/.nexus/tmp")),
            is(empty()));
      }
    }
    finally {
      server.stop();
    }
  }

    // stop the healthy server
    int port = serverResource.getServerProvider().getPort();
    serverResource.getServerProvider().stop();

    // start a server which always returns HTTP 500 for GET
    Server return500Server = Server.withPort(port).serve("/*").withBehaviours(error(500)).start();

    // download again
    try {
      downloadArtifact(getNexusTestRepoUrl(),
          "nexus1111", "artifact", "1.1", "jar", null, "target/downloads");
      Assert.fail("Should throw an exception because the remote is in an error status");
    }
    catch (Exception e) {
      // skip
    }

    // The commented-out block below makes this IT unpredictable:
    // by starting the "healthy" server, the repo will eventually unblock during an ExpireCache task run (more than 20 sec).
    // So it is commented out; we have NFC ITs anyway (that's what the following fetch would test).
    // -- cstamas

    // // stop the error server, start the healthy server
    // return500Server.stop();
    // server.start();
    //
    // try
    // {
    // downloadArtifact( "nexus1111", "artifact", "1.1", "jar", null, "target/downloads" );
    // Assert.fail( "Still fails before a clear cache." );
    // }
    // catch ( Exception e )
    // {
    // // skip
    // }

    // clear cache, then download
    ScheduledServicePropertyResource prop = new ScheduledServicePropertyResource();
    prop.setKey("repositoryId");
    prop.setValue(getTestRepositoryId());
    TaskScheduleUtil.runTask(ExpireCacheTaskDescriptor.ID, prop);

    try {
      // the proxy is now working <- NOT TRUE, it is auto blocked!
      downloadArtifact(getNexusTestRepoUrl(),
          "nexus1111", "artifact", "1.1", "jar", null, "target/downloads");
      Assert.fail("Should fail, since repository is in AutoBlock mode!");
    }
    catch (Exception e) {
      // skip
    }

    // check for auto block
    // TODO: interestingly, RepositoryMessageUtil.getStatus() neglects JSON here, so
    // we do not use it and switch back to XML, which is what is wired into this util class.
    RepositoryMessageUtil util = new RepositoryMessageUtil(this.getXMLXStream(), MediaType.APPLICATION_XML);

    RepositoryStatusResource status = util.getStatus(getTestRepositoryId());

    Assert.assertEquals("Repository should be auto-blocked", ProxyMode.BLOCKED_AUTO.name(), status.getProxyMode());

    // stop the error server, start the healthy server
    return500Server.stop();
    serverResource.getServerProvider().start();

    // unblock it manually
    // NEXUS-4410: since this issue is implemented, the lines below are not enough,
    // since NFC will still contain the artifact to be downloaded, so we need to manually block it and then allow proxy

public class PageTest
    extends TestSupport
{
  @Test
  public void simpleCase() throws Exception {
    final Server server =
        Server.withPort(0).serve("/foo/bar/").withBehaviours(Behaviours.content("<html></html>"));
    server.start();
    try {
      final String repoRootUrl = server.getUrl().toString() + "/foo/bar/";
      final PageContext context = new PageContext(new DefaultHttpClient());
      final Page page = Page.getPageFor(context, repoRootUrl);
      assertThat(page.getUrl(), equalTo(repoRootUrl));
      assertThat(page.getHttpResponse().getStatusLine().getStatusCode(), equalTo(200));
      assertThat(page.getDocument(), notNullValue());
    }
    finally {
      server.stop();
    }
  }

    }
  }

  @Test
  public void error404WithBody() throws Exception {
    final Server server = Server.withPort(0).serve("/foo/bar/").withBehaviours(Behaviours.error(404));
    server.start();
    try {
      final String repoRootUrl = server.getUrl().toString() + "/foo/bar/";
      final PageContext context = new PageContext(new DefaultHttpClient());
      final Page page = Page.getPageFor(context, repoRootUrl);
      assertThat(page.getUrl(), equalTo(repoRootUrl));
      assertThat(page.getHttpResponse().getStatusLine().getStatusCode(), equalTo(404));
      assertThat(page.getDocument(), notNullValue());
    }
    finally {
      server.stop();
    }
  }

    }
  }

  @Test(expected = UnexpectedPageResponse.class)
  public void error500IsException() throws Exception {
    final Server server = Server.withPort(0).serve("/*").withBehaviours(Behaviours.error(500));
    server.start();
    try {
      final String repoRootUrl = server.getUrl().toString() + "/foo/bar/";
      final PageContext context = new PageContext(new DefaultHttpClient());
      final Page page = Page.getPageFor(context, repoRootUrl);
    }
    finally {
      server.stop();
    }
  }

  }

  @Test(expected = SocketException.class)
  public void errorConnectionRefusedException() throws Exception {
    final String repoRootUrl;
    final Server server = Server.withPort(0).serve("/*").withBehaviours(Behaviours.error(500));
    server.start();
    try {
      repoRootUrl = server.getUrl().toString() + "/foo/bar/";
    }
    finally {
      server.stop();
    }
    final PageContext context = new PageContext(new DefaultHttpClient());
    final Page page = Page.getPageFor(context, repoRootUrl);
  }

      when(repository.getId()).thenReturn("foo");
      when(repository.getName()).thenReturn("foo");
      when(repository.getRemoteStorageContext()).thenReturn(proxyContext);

      // a mock remote server that will simply "hang" to occupy the request socket
      final Server server =
          Server.withPort(0).serve("/").withBehaviours(Behaviours.pause(Time.days(1))).start();
      // the URL we will try to connect to
      final String url = "http://foo.com:" + server.getPort() + "/foo/bar.jar";
      // the requesting logic packed as Runnable
      final Runnable request = new RequesterRunnable(underTest, repository, url);
      try {
        // we fire the 1st request on a Thread; that thread will block, as the Server simply "pauses".
        // This also means the connection stays leased from the pool, and since the pool size is 1,
        // we have intentionally depleted the connection pool (reached the max connection count)
        final Thread blockedThread = new Thread(request);
        blockedThread.start();

        // give some time to thread above
        Thread.sleep(200);

        try {
          // in the current thread we try to establish a 2nd connection;
          // this must fail, as the connection pool is depleted.
          // ConnectionPoolTimeoutException should be thrown by HC4,
          // which RRS "repackages" into RemoteStorageTransportOverloadedException
          request.run();

          // fail if no exception
          Assert.fail("RemoteStorageTransportOverloadedException expected!");
        }
        catch (IllegalStateException e) {
          Assert.assertNotNull("We expect the cause to be RemoteStorageTransportOverloadedException!",
              e.getCause());
          Assert.assertEquals(RemoteStorageTransportOverloadedException.class, e.getCause().getClass());
        }
      }
      finally {
        server.stop();
      }
    }
    finally {
      if (httpClientFactory != null) {
        httpClientFactory.shutdown();

  @Test
  public void simple200()
      throws Exception
  {
    final Server server = prepareServer(200);
    server.start();
    try {
      final HttpClient httpClient = new DefaultHttpClient();
      final String repoRoot = server.getUrl().toString() + "/trunk/somerepo/";
      when(mavenProxyRepository.getRemoteUrl()).thenReturn(repoRoot);
      final ScrapeContext context = new ScrapeContext(mavenProxyRepository, httpClient, 2);
      final Page page = Page.getPageFor(context, repoRoot);
      getScraper().scrape(context, page);
      assertThat(context.isStopped(), is(true));
      assertThat(context.isSuccessful(), is(true));
      assertThat(context.getPrefixSource(), notNullValue());
      final List<String> entries = context.getPrefixSource().readEntries();
      assertThat(entries, notNullValue());
      assertThat(entries.size(), equalTo(2));
      assertThat(entries, contains("/com/foo", "/com/bar"));
    }
    finally {
      server.stop();
    }
  }
