Package: com.flaptor.util.remote

Examples of com.flaptor.util.remote.WebServer


            xmlRpcServer.start();
        }
        if (openSearch || web || xml) {
            Config config = Config.getConfig("searcher.properties");
            int httpServerPort = PortUtil.getPort("searcher.http");
            httpServer = new WebServer(httpServerPort);

            if (openSearch) {
                String context = config.getString("opensearch.context");
                logger.info("MultipleRpcSearcher constructor: starting OpenSearch searcher on port " + httpServerPort + " context "+context);
                httpServer.addHandler(context, new OpenSearchHandler(baseSearcher));
View Full Code Here


        Page out, in;
        in = PageTest.randomPage();
        in.setUrl(url);

        WebServer server = null;
        Crawler crawler = null;
        try {
            server = new WebServer(8085);
            server.addResourceHandler("/", tmpDir+"/web");
            server.start();

            PageDB db = new PageDB(tmpDir+"/testdb");
            db.open(PageDB.WRITE);
            db.addPage(in);
            db.close();

            crawler = new Crawler();

            int tries = 0;
            int maxTries = 10;
            do {
                tries++;

                crawler.crawl(1);

                db.open(PageDB.READ);
                Iterator<Page> pages = db.iterator();
                assertTrue("The crawler lost or discarded the test page", pages.hasNext());
                out = pages.next();
                assertFalse("The crawler has more than the test page", pages.hasNext());
                db.close();
            } while (out.getRetries() > 0 && tries <= maxTries);

        } finally {
            if (null != crawler) {
                crawler.cleanup();
            }
            server.requestStop();
            while (! server.isStopped()) {
                Execute.sleep(20);
            }
        }

        assertTrue("Test page url changed", in.getUrl().equals(out.getUrl()));
View Full Code Here

        config.set("keep.original.url.on.redirect", "true");

        TestUtils.writeFile(tmpDir+"/web/one.html", "<a href='page two.html?a <b> c#d'>two</a>");
        TestUtils.writeFile(tmpDir+"/web/page two.html", "content");
       
        WebServer server = null;
        Crawler crawler = null;

        Page in, one, two;
        in = PageTest.randomPage();
        in.setUrl(url);
       
        try {
            server = new WebServer(8087);
            server.addResourceHandler("/", tmpDir+"/web");
            server.start();

            PageDB db = new PageDB(tmpDir+"/testdb");
            db.open(PageDB.WRITE);
            db.addPage(in);
            db.close();

            crawler = new Crawler();

            crawler.crawl(2);

            db.open(PageDB.READ);
            Iterator<Page> pages = db.iterator();
            assertTrue("The crawler lost or discarded all test pages", pages.hasNext());
            one = pages.next();
            assertTrue("The crawler lost or discarded the second test page", pages.hasNext());
            two = pages.next();
            assertFalse("The crawler has more than two pages", pages.hasNext());
            db.close();
        } finally {
            if (null != crawler) {
                crawler.cleanup();
            }
            server.requestStop();
            while (!server.isStopped()) {
                Execute.sleep(20);
            }
        }

        assertTrue("Failed in fetching both test pages", (one.getLastSuccess() > 0) && (two.getLastSuccess() > 0));
View Full Code Here

            // Store the data for later recall
            testPages.put(url,text);
        }

        // Start the web server
        WebServer server = new WebServer(8086);
        server.addResourceHandler("/", tmpDir+"/web");
        server.start();

        // Run the fetcher
        FetchData fetchdata = fetcher.fetch(fetchlist);

        // Stop the web server
        server.requestStop();
        for ( int i = 0; i < 100; i++) {
            if (server.isStopped()) break;
            com.flaptor.util.Execute.sleep(1000);
        }

        // Check that the page has been fetched
        for (FetchDocument doc : fetchdata) {
View Full Code Here

TOP

Related Classes of com.flaptor.util.remote.WebServer

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.