Package net.yacy.cora.protocol.http

Examples of net.yacy.cora.protocol.http.HTTPClient
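
The snippets below are excerpts from the YaCy code base. As an orientation, here is a minimal, self-contained sketch of the recurring pattern; the import paths, URL and timeout are assumptions, not part of the excerpts:

    import net.yacy.cora.protocol.ClientIdentification;
    import net.yacy.cora.protocol.HeaderFramework;
    import net.yacy.cora.protocol.RequestHeader;
    import net.yacy.cora.protocol.http.HTTPClient;

    public class HTTPClientMinimalSketch {
        public static void main(final String[] args) throws Exception {
            // build the request headers, as most examples below do
            final RequestHeader reqHeader = new RequestHeader();
            reqHeader.put(HeaderFramework.USER_AGENT, ClientIdentification.getUserAgent());

            final HTTPClient client = new HTTPClient();
            client.setHeader(reqHeader.entrySet());
            client.setTimout(10000); // "setTimout" (sic) is the method name used throughout these examples

            try {
                // GETbytes also accepts URL objects; the String overload is shown here
                final byte[] body = client.GETbytes("http://localhost:8090/");
                if (body != null && client.getStatusCode() > 199 && client.getStatusCode() < 300) {
                    System.out.println("fetched " + body.length + " bytes");
                }
            } finally {
                client.finish(); // release the underlying connection, as the proxy example below does
            }
        }
    }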


        final RequestHeader reqHeader = new RequestHeader();
        reqHeader.put(HeaderFramework.PRAGMA, "no-cache");
        reqHeader.put(HeaderFramework.CACHE_CONTROL, "no-cache");
        reqHeader.put(HeaderFramework.USER_AGENT, ClientIdentification.getUserAgent());
        final HTTPClient client = new HTTPClient();
        client.setHeader(reqHeader.entrySet());
        client.setTimout((int) getConfigLong("bootstrapLoadTimeout", 20000));

        yacyCore.log.logInfo("BOOTSTRAP: " + sc + " seeds known from previous run");

        // - use the superseed to further fill up the seedDB
        int ssc = 0, c = 0;
        while (true) {
            if (Thread.currentThread().isInterrupted()) {
                break;
            }
            seedListFileURL = sb.getConfig("network.unit.bootstrap.seedlist" + c, "");
            if (seedListFileURL.length() == 0) {
                break;
            }
            c++;
            if (
                    seedListFileURL.startsWith("http://") ||
                    seedListFileURL.startsWith("https://")
            ) {
                // load the seed list
                try {

                    url = new DigestURI(seedListFileURL);
                    // check freshness with a HEAD request before downloading
                    client.HEADResponse(url.toString());
                    header = new ResponseHeader(client.getHttpResponse().getAllHeaders());
                    if (header.lastModified() == null) {
                        yacyCore.log.logWarning("BOOTSTRAP: seed-list URL " + seedListFileURL + " not usable, last-modified is missing");
                    } else if ((header.age() > 86400000) && (ssc > 0)) {
                        // 86400000 ms = one day
                        yacyCore.log.logInfo("BOOTSTRAP: seed-list URL " + seedListFileURL + " too old (" + (header.age() / 86400000) + " days)");
                    } else {
                        ssc++;
                        final byte[] content = client.GETbytes(url);
                        enu = FileUtils.strings(content);
                        lc = 0;
                        while (enu.hasNext()) {
                            try {
                                ys = yacySeed.genRemoteSeed(enu.next(), null, false, null);
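
The bootstrap example above (truncated here) checks seed-list freshness with a HEAD request before downloading. Reduced to its core, and assuming the imports from the sketch at the top plus net.yacy.cora.protocol.ResponseHeader, the pattern is:

        final HTTPClient client = new HTTPClient();
        client.HEADResponse("http://example.net/seedlist.txt"); // placeholder URL
        final ResponseHeader header = new ResponseHeader(client.getHttpResponse().getAllHeaders());
        if (header.lastModified() == null) {
            // not usable: the server sent no Last-Modified header
        } else if (header.age() > 86400000) {
            // older than one day (86400000 ms); possibly skip it
        } else {
            // fresh enough: download the body with the same client
            final byte[] content = client.GETbytes("http://example.net/seedlist.txt");
        }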


    /**
     * Loads the resource at the given URL and parses it as a key/value map.
     * @param url the resource to load
     * @return the parsed map, or an empty map if loading or parsing fails
     */
    public static Map<String, String> loadFileAsMap(final DigestURI url) {
        final RequestHeader reqHeader = new RequestHeader();
        reqHeader.put(HeaderFramework.USER_AGENT, ClientIdentification.getUserAgent());
        final HTTPClient client = new HTTPClient();
        client.setHeader(reqHeader.entrySet());
        try {
            // sending request
            final Map<String, String> result = FileUtils.table(client.GETbytes(url));
            return (result == null) ? new HashMap<String, String>() : result;
        } catch (final Exception e) {
            Log.logException(e);
            return new HashMap<String, String>();
        }
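
A hypothetical call site for loadFileAsMap (the URL is a placeholder; the DigestURI constructor may throw, so a real caller would handle MalformedURLException):

        final Map<String, String> settings = loadFileAsMap(new DigestURI("http://example.net/settings.txt"));
        for (final Map.Entry<String, String> entry : settings.entrySet()) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }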

        // send 'wget' to web interface
        final RequestHeader requestHeader = new RequestHeader();
        requestHeader.put(RequestHeader.AUTHORIZATION, "realm=" + encodedPassword); // for HTTP authentication
        final HTTPClient con = new HTTPClient();
        con.setHeader(requestHeader.entrySet());
        try {
            con.GETbytes("http://localhost:" + port + "/" + path);

            // read response
            if (con.getStatusCode() > 199 && con.getStatusCode() < 300) {
                Log.logConfig("COMMAND-STEERING", "YACY accepted steering command: " + processdescription);
            } else {
                Log.logSevere("COMMAND-STEERING", "error response from YACY socket: " + con.getHttpResponse().getStatusLine());
                System.exit(-1);
            }
        } catch (final IOException e) {
            Log.logSevere("COMMAND-STEERING", "could not establish connection to YACY socket: " + e.getMessage());
            System.exit(-1);

           
        // setup http-client
        // TODO: add traffic accounting for the robots.txt download?
        final HTTPClient client = new HTTPClient();
        client.setHeader(reqHeaders.entrySet());
        try {
            // check for interruption
            if (Thread.currentThread().isInterrupted()) throw new InterruptedException("Shutdown in progress.");
           
            // sending the get request
            robotsTxt = client.GETbytes(robotsURL);
            // statistics:
            if (robotsTxt != null) {
                ByteCount.addAccountCount(ByteCount.CRAWLER, robotsTxt.length);
            }
            final int code = client.getHttpResponse().getStatusLine().getStatusCode();
            final ResponseHeader header = new ResponseHeader(client.getHttpResponse().getAllHeaders());
           
            // check the response status
            if (code > 199 && code < 300) {
                if (!header.mime().startsWith("text/plain")) {
                    robotsTxt = null;
                    log.info("Robots.txt from URL '" + robotsURL + "' has wrong mimetype '" + header.mime() + "'.");
                } else {

                    // read some metadata
                    eTag = header.containsKey(HeaderFramework.ETAG) ? header.get(HeaderFramework.ETAG).trim() : null;
                    lastMod = header.lastModified();

                    // if the robots.txt file has not changed, stop here
                    if ((eTag != null) && (oldEtag != null) && eTag.equals(oldEtag)) {
                        if (log.isDebugEnabled()) log.debug("Robots.txt from URL '" + robotsURL + "' was not modified. Abort downloading of new version.");
                        return null;
                    }

                    downloadEnd = System.currentTimeMillis();
                    if (log.isDebugEnabled()) log.debug("Robots.txt successfully loaded from URL '" + robotsURL + "' in " + (downloadEnd - downloadStart) + " ms.");
                }
            } else if (code == 304) {
                // not modified
                return null;
            } else if (code > 299 && code < 400) {
                // read the redirection target
                String redirectionUrlString = header.get(HeaderFramework.LOCATION);
                if (redirectionUrlString == null) {
                    if (log.isDebugEnabled())
                        log.debug("robots.txt could not be downloaded from URL '" + robotsURL + "' because of missing redirection header. [" + client.getHttpResponse().getStatusLine() + "].");
                    robotsTxt = null;
                } else {

                    redirectionUrlString = redirectionUrlString.trim();

                    // generate the new URL object, resolving relative targets
                    final MultiProtocolURI redirectionUrl = MultiProtocolURI.newURL(robotsURL, redirectionUrlString);

                    // follow the redirection
                    if (log.isDebugEnabled()) log.debug("Redirection detected for robots.txt with URL '" + robotsURL + "'." +
                            "\nRedirecting request to: " + redirectionUrl);
                    return downloadRobotsTxt(redirectionUrl, redirectionCount, entry);
                }
            } else if (code == 401 || code == 403) {
                accessCompletelyRestricted = true;
                if (log.isDebugEnabled()) log.debug("Access to Robots.txt not allowed on URL '" + robotsURL + "'.");
            } else {
                if (log.isDebugEnabled())
                    log.debug("robots.txt could not be downloaded from URL '" + robotsURL + "'. [" + client.getHttpResponse().getStatusLine() + "].");
                robotsTxt = null;
            }
        } catch (final Exception e) {
            throw e;
        }
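
The ETag comparison above is the cheap change check. Isolated, and assuming oldEtag was stored with the previous download, it is just:

        final String eTag = header.containsKey(HeaderFramework.ETAG) ? header.get(HeaderFramework.ETAG).trim() : null;
        if (eTag != null && eTag.equals(oldEtag)) {
            // the robots.txt has not changed since the last fetch: keep the cached version
            return null;
        }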

            final byte[] b = client.get(this.path);
            client.CLOSE();
            return new ByteArrayInputStream(b);
        }
        if (isHTTP() || isHTTPS()) {
            final HTTPClient client = new HTTPClient();
            client.setTimout(timeout);
            client.setUserAgent(userAgent);
            client.setHost(this.getHost()); // set the Host header explicitly
            final byte[] bytes = client.GETbytes(this);
            return bytes == null ? null : new ByteArrayInputStream(bytes); // GETbytes may return null, so guard before wrapping
        }
       
        return null;
    }

            final byte[] b = client.get(this.path);
            client.CLOSE();
            return b;
        }
        if (isHTTP() || isHTTPS()) {
            final HTTPClient client = new HTTPClient();
            client.setTimout(timeout);
            client.setUserAgent(userAgent);
            client.setHost(this.getHost()); // set the Host header explicitly
            return client.GETbytes(this);
        }
       
        return null;
    }
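
These two MultiProtocolURI helpers differ only in whether they wrap the result in a stream. Worth noting is the explicit Host header; a reduced sketch, where url is a MultiProtocolURI and the timeout and agent string are placeholders:

        final HTTPClient client = new HTTPClient();
        client.setTimout(8000);
        client.setUserAgent("yacybot (example)");
        client.setHost(url.getHost()); // set the Host header explicitly, presumably for name-based virtual hosts
        final byte[] bytes = client.GETbytes(url);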

            parts.put("maximumRecords", UTF8.StringBody(Long.toString(maximumRecords)));
            parts.put("verify", cacheStrategy == null ? UTF8.StringBody("false") : UTF8.StringBody(cacheStrategy.toName()));
            parts.put("resource", UTF8.StringBody(global ? "global" : "local"));
            parts.put("nav", UTF8.StringBody("none"));
            final HTTPClient httpClient = new HTTPClient(userAgent == null ? ClientIdentification.getUserAgent() : userAgent, (int) timeout);
            result = httpClient.POSTbytes(new MultiProtocolURI(rssSearchServiceURL), uri.getHost(), parts, false);

            final RSSReader reader = RSSReader.parse(RSSFeed.DEFAULT_MAXSIZE, result);
            if (reader == null) {
                throw new IOException("cora.Search failed asking peer '" + uri.getHost() + "': probably bad response from remote peer (1), reader == null");
            }
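
Reduced to the essential calls, the multipart POST above looks like this. The service URL and field values are placeholders, and parts is assumed to be a Map<String, ContentBody> (ContentBody being the Apache HttpClient mime type that UTF8.StringBody produces):

        final Map<String, ContentBody> parts = new LinkedHashMap<String, ContentBody>();
        parts.put("query", UTF8.StringBody("yacy"));
        parts.put("resource", UTF8.StringBody("local"));
        final HTTPClient httpClient = new HTTPClient(ClientIdentification.getUserAgent(), 8000);
        final byte[] result = httpClient.POSTbytes(new MultiProtocolURI("http://localhost:8090/yacysearch.rss"), "localhost", parts, false);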

        requestHeader.put(HeaderFramework.ACCEPT_LANGUAGE, sb.getConfig("crawler.http.acceptLanguage", DEFAULT_LANGUAGE));
        requestHeader.put(HeaderFramework.ACCEPT_CHARSET, sb.getConfig("crawler.http.acceptCharset", DEFAULT_CHARSET));
        requestHeader.put(HeaderFramework.ACCEPT_ENCODING, sb.getConfig("crawler.http.acceptEncoding", DEFAULT_ENCODING));

        // HTTP-Client
        final HTTPClient client = new HTTPClient();
        client.setRedirecting(false); // we want to handle redirection ourselves, so we don't index pages twice
        client.setTimout(socketTimeout);
        client.setHeader(requestHeader.entrySet());
        // send request
        final byte[] responseBody = client.GETbytes(url, maxFileSize);
        final ResponseHeader header = new ResponseHeader(client.getHttpResponse().getAllHeaders());
        final int code = client.getHttpResponse().getStatusLine().getStatusCode();

        if (code > 299 && code < 310) {
            // redirection (content may be empty)
            if (header.containsKey(HeaderFramework.LOCATION)) {
                // read the redirection target
                String redirectionUrlString = header.get(HeaderFramework.LOCATION);
                redirectionUrlString = redirectionUrlString.trim();

                if (redirectionUrlString.length() == 0) {
                    sb.crawlQueues.errorURL.push(request, sb.peers.mySeed().hash.getBytes(), new Date(), 1, FailCategory.TEMPORARY_NETWORK_FAILURE, "redirection header empty", code);
                    throw new IOException("CRAWLER Redirection of URL=" + request.url().toString() + " aborted. Location header is empty.");
                }

                // normalize the URL, resolving relative targets
                final DigestURI redirectionUrl = new DigestURI(MultiProtocolURI.newURL(request.url(), redirectionUrlString));

                // restart crawling with the new url
                this.log.logInfo("CRAWLER Redirection detected ('" + client.getHttpResponse().getStatusLine() + "') for URL " + request.url().toString());
                this.log.logInfo("CRAWLER ..Redirecting request to: " + redirectionUrl);

                // if we are already doing a shutdown we don't need to retry crawling
                if (Thread.currentThread().isInterrupted()) {
                    sb.crawlQueues.errorURL.push(request, sb.peers.mySeed().hash.getBytes(), new Date(), 1, FailCategory.FINAL_LOAD_CONTEXT, "server shutdown", code);
                    throw new IOException("CRAWLER Retry of URL=" + request.url().toString() + " aborted because of server shutdown.");
                }

                // check if the url was already indexed
                final String dbname = sb.urlExists(Segments.Process.LOCALCRAWLING, redirectionUrl.hash());
                if (dbname != null) {
                    sb.crawlQueues.errorURL.push(request, sb.peers.mySeed().hash.getBytes(), new Date(), 1, FailCategory.TEMPORARY_NETWORK_FAILURE, "redirection to double content", code);
                    throw new IOException("CRAWLER Redirection of URL=" + request.url().toString() + " ignored. The url appears already in db " + dbname);
                }

                // retry crawling with the new url
                request.redirectURL(redirectionUrl);
                return load(request, retryCount - 1, maxFileSize, checkBlacklist);
            } else {
                // no redirection url provided
                sb.crawlQueues.errorURL.push(request, sb.peers.mySeed().hash.getBytes(), new Date(), 1, FailCategory.TEMPORARY_NETWORK_FAILURE, "no redirection url provided", code);
                throw new IOException("REJECTED EMPTY REDIRECTION '" + client.getHttpResponse().getStatusLine() + "' for URL " + request.url().toString());
            }
        } else if (responseBody == null) {
            // no response, reject file
            sb.crawlQueues.errorURL.push(request, sb.peers.mySeed().hash.getBytes(), new Date(), 1, FailCategory.TEMPORARY_NETWORK_FAILURE, "no response body", code);
            throw new IOException("REJECTED EMPTY RESPONSE BODY '" + client.getHttpResponse().getStatusLine() + "' for URL " + request.url().toString());
        } else if (code == 200 || code == 203) {
            // the transfer is ok

            // we write the new cache entry to the file system directly
            long contentLength = responseBody.length;
            ByteCount.addAccountCount(ByteCount.CRAWLER, contentLength);

            // check length again in case it was not possible to get the length before loading
            if (maxFileSize > 0 && contentLength > maxFileSize) {
                sb.crawlQueues.errorURL.push(request, sb.peers.mySeed().hash.getBytes(), new Date(), 1, FailCategory.FINAL_PROCESS_CONTEXT, "file size limit exceeded", code);
                throw new IOException("REJECTED URL " + request.url() + " because file size '" + contentLength + "' exceeds max filesize limit of " + maxFileSize + " bytes. (GET)");
            }

            // create a new cache entry
            final CrawlProfile profile = sb.crawler.getActive(request.profileHandle().getBytes());
            response = new Response(
                    request,
                    requestHeader,
                    header,
                    Integer.toString(code),
                    profile,
                    responseBody
            );

            return response;
        } else {
            // unexpected status code: reject the file
            sb.crawlQueues.errorURL.push(request, sb.peers.mySeed().hash.getBytes(), new Date(), 1, FailCategory.TEMPORARY_NETWORK_FAILURE, "wrong http status code", code);
            throw new IOException("REJECTED WRONG STATUS TYPE '" + client.getHttpResponse().getStatusLine() + "' for URL " + request.url().toString());
        }
    }
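
The crawler above turns off the client's own redirect handling so it can vet the target (blacklist, duplicate check) before following it. The skeleton of that pattern, with error handling trimmed and url/maxFileSize as placeholders:

        final HTTPClient client = new HTTPClient();
        client.setRedirecting(false); // handle 3xx ourselves instead of following automatically
        final byte[] body = client.GETbytes(url, maxFileSize);
        final int code = client.getHttpResponse().getStatusLine().getStatusCode();
        if (code > 299 && code < 310) {
            final ResponseHeader header = new ResponseHeader(client.getHttpResponse().getAllHeaders());
            final String location = header.get(HeaderFramework.LOCATION);
            if (location != null && location.trim().length() > 0) {
                // resolve a possibly relative Location against the original URL before retrying
                final DigestURI redirectionUrl = new DigestURI(MultiProtocolURI.newURL(url, location.trim()));
                // ... vet redirectionUrl, then retry the request against it
            }
        }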

        requestHeader.put(HeaderFramework.USER_AGENT, ClientIdentification.getUserAgent());
        requestHeader.put(HeaderFramework.ACCEPT_LANGUAGE, DEFAULT_LANGUAGE);
        requestHeader.put(HeaderFramework.ACCEPT_CHARSET, DEFAULT_CHARSET);
        requestHeader.put(HeaderFramework.ACCEPT_ENCODING, DEFAULT_ENCODING);

        final HTTPClient client = new HTTPClient();
        client.setTimout(20000);
        client.setHeader(requestHeader.entrySet());
        final byte[] responseBody = client.GETbytes(request.url(), Long.MAX_VALUE);
        final ResponseHeader header = new ResponseHeader(client.getHttpResponse().getAllHeaders());
        final int code = client.getHttpResponse().getStatusLine().getStatusCode();
        // FIXME: the 30*-handling (bottom) is never reached
        // because httpClient.followRedirects = true always delivers the final content

        if (responseBody != null && (code == 200 || code == 203)) {
            // the transfer is ok

            // statistics:
            ByteCount.addAccountCount(ByteCount.CRAWLER, responseBody.length);

            // we write the new cache entry to the file system directly

            // create a new cache entry
            response = new Response(
                    request,
                    requestHeader,
                    header,
                    Integer.toString(code),
                    null,
                    responseBody
            );

            return response;
        } else if (code > 299 && code < 310) {
            if (header.containsKey(HeaderFramework.LOCATION)) {
                // read the redirection target
                String redirectionUrlString = header.get(HeaderFramework.LOCATION);
                redirectionUrlString = redirectionUrlString.trim();

                if (redirectionUrlString.length() == 0) {
                    throw new IOException("CRAWLER Redirection of URL=" + request.url().toString() + " aborted. Location header is empty.");
                }

                // normalize the URL, resolving relative targets
                final DigestURI redirectionUrl = new DigestURI(MultiProtocolURI.newURL(request.url(), redirectionUrlString));

                // if we are already doing a shutdown we don't need to retry crawling
                if (Thread.currentThread().isInterrupted()) {
                    throw new IOException("CRAWLER Retry of URL=" + request.url().toString() + " aborted because of server shutdown.");
                }

                // retry crawling with the new url
                request.redirectURL(redirectionUrl);
                return load(request, retryCount - 1);
            }
        } else {
            // unexpected status code: reject the file
            throw new IOException("REJECTED WRONG STATUS TYPE '" + client.getHttpResponse().getStatusLine() + "' for URL " + request.url().toString());
        }
        return response;
    }

            modifyProxyHeaders(requestHeader, httpVer);

            final String connectHost = hostPart(host, port, yAddress);
            final String getUrl = "http://"+ connectHost + remotePath;

            final HTTPClient client = setupHttpClient(requestHeader, connectHost);

            // send request
            try {
                client.GET(getUrl); // open the response; the body is streamed later via writeTo()
                if (log.isFinest()) log.logFinest(reqID +"    response status: "+ client.getHttpResponse().getStatusLine());
                conProp.put(HeaderFramework.CONNECTION_PROP_CLIENT_REQUEST_HEADER, requestHeader);

                final ResponseHeader responseHeader = new ResponseHeader(client.getHttpResponse().getAllHeaders());
                // determine if it's an internal error of the httpc
                if (responseHeader.isEmpty()) {
                    throw new Exception(client.getHttpResponse().getStatusLine().toString());
                }

                final ChunkedOutputStream chunkedOut = setTransferEncoding(conProp, responseHeader, client.getHttpResponse().getStatusLine().getStatusCode(), respond);

                // the cache does either not exist or is (supposed to be) stale
                long sizeBeforeDelete = -1;
                if (cachedResponseHeader != null) {
                    // delete the cache
                    final ResponseHeader rh = Cache.getResponseHeader(url.hash());
                    if (rh != null && (sizeBeforeDelete = rh.getContentLength()) == 0) {
                        final byte[] b = Cache.getContent(url.hash());
                        if (b != null) sizeBeforeDelete = b.length;
                    }
                    Cache.delete(url);
                    conProp.put(HeaderFramework.CONNECTION_PROP_PROXY_RESPOND_CODE, "TCP_REFRESH_MISS");
                }

                // reserve a cache entry
                final Request request = new Request(
                        null,
                        url,
                        requestHeader.referer() == null ? null : new DigestURI(requestHeader.referer()).hash(),
                        "",
                        responseHeader.lastModified(),
                        sb.crawler.defaultProxyProfile.handle(),
                        0,
                        0,
                        0,
                        sizeBeforeDelete < 0 ? 0 : sizeBeforeDelete);


                // handle incoming cookies
                handleIncomingCookies(responseHeader, host, ip);

                prepareResponseHeader(responseHeader, client.getHttpResponse().getProtocolVersion().toString());

                // sending the respond header back to the client
                if (chunkedOut != null) {
                    responseHeader.put(HeaderFramework.TRANSFER_ENCODING, "chunked");
                }

                if (log.isFinest()) log.logFinest(reqID +"    sending response header: "+ responseHeader);
                HTTPDemon.sendRespondHeader(
                        conProp,
                        respond,
                        httpVer,
                        client.getHttpResponse().getStatusLine().getStatusCode(),
                        client.getHttpResponse().getStatusLine().toString(), // status text
                        responseHeader);

                if (hasBody(client.getHttpResponse().getStatusLine().getStatusCode())) {

                    final OutputStream outStream = chunkedOut != null ? chunkedOut : respond;
                    final Response response = new Response(
                            request,
                            requestHeader,
                            responseHeader,
                            Integer.toString(client.getHttpResponse().getStatusLine().getStatusCode()),
                            sb.crawler.defaultProxyProfile
                    );
                    final String storeError = response.shallStoreCacheForProxy();
                    final boolean storeHTCache = response.profile().storeHTCache();
                    final String supportError = TextParser.supports(response.url(), response.getMimeType());
                    if (
                            /*
                             * Now we store the response into the htcache directory if
                             * a) the response is cacheable AND
                             */
                            (storeError == null) &&
                            /*
                             * b) the user has configured to use the htcache OR
                             * c) the content should be indexed
                             */
                            ((storeHTCache) || (supportError != null))
                    ) {
                        // we don't actually write into a file, only to RAM, and schedule writing the file
                        final int l = responseHeader.size();
                        final ByteArrayOutputStream byteStream = new ByteArrayOutputStream((l < 32) ? 32 : l);

                        // stream the body to the client and into memory at the same time
                        final OutputStream toClientAndMemory = new MultiOutputStream(new OutputStream[] {outStream, byteStream});
                        client.writeTo(toClientAndMemory);
                        // cached bytes
                        byte[] cacheArray;
                        if (byteStream.size() > 0) {
                            cacheArray = byteStream.toByteArray();
                        } else {
                            cacheArray = null;
                        }
                        if (log.isFine()) log.logFine(reqID +" writeContent of " + url + " produced cacheArray = " + ((cacheArray == null) ? "null" : ("size=" + cacheArray.length)));

                        if (sizeBeforeDelete == -1) {
                            // totally fresh file
                            response.setContent(cacheArray);
                            try {
                                Cache.store(response.url(), response.getResponseHeader(), cacheArray);
                                sb.toIndexer(response);
                            } catch (final IOException e) {
                                log.logWarning("cannot write " + response.url() + " to Cache (1): " + e.getMessage(), e);
                            }
                            conProp.put(HeaderFramework.CONNECTION_PROP_PROXY_RESPOND_CODE, "TCP_MISS");
                        } else if (cacheArray != null && sizeBeforeDelete == cacheArray.length) {
                            // before we came here we deleted a cache entry
                            cacheArray = null;
                            //cacheManager.push(cacheEntry); // unnecessary update
                            conProp.put(HeaderFramework.CONNECTION_PROP_PROXY_RESPOND_CODE, "TCP_REF_FAIL_HIT");
                        } else {
                            // before we came here we deleted a cache entry
                            response.setContent(cacheArray);
                            try {
                                Cache.store(response.url(), response.getResponseHeader(), cacheArray);
                                sb.toIndexer(response);
                            } catch (final IOException e) {
                                log.logWarning("cannot write " + response.url() + " to Cache (2): " + e.getMessage(), e);
                            }
                            conProp.put(HeaderFramework.CONNECTION_PROP_PROXY_RESPOND_CODE, "TCP_REFRESH_MISS");
                        }
                    } else {
                        // no caching
                        if (log.isFine()) log.logFine(reqID +" "+ url.toString() + " not cached." +
                                " StoreError=" + ((storeError==null)?"None":storeError) +
                                " StoreHTCache=" + storeHTCache +
                                " SupportError=" + supportError);

                        client.writeTo(outStream); // stream the body to the client without caching

                        conProp.put(HeaderFramework.CONNECTION_PROP_PROXY_RESPOND_CODE,"TCP_MISS");
                    }

                    if (chunkedOut != null) {
                        chunkedOut.finish();
                        chunkedOut.flush();
                    }
                } // end hasBody
            } catch (final SocketException se) {
                // the client cut the proxy connection: abort the download
                client.finish();
                handleProxyException(se, conProp, respond, url);
            } finally {
                // always release the connection
                client.finish();
            }
        } catch (final Exception e) {
            handleProxyException(e,conProp,respond,url);
        }
    }
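
Unlike the earlier examples, the proxy does not buffer the body with GETbytes; it opens the response with GET and streams it with writeTo. The core of that flow, reduced (out is any OutputStream, the URL is a placeholder, error handling trimmed):

        final HTTPClient client = new HTTPClient();
        try {
            client.GET("http://example.net/large-file"); // opens the response without reading the body
            final int status = client.getHttpResponse().getStatusLine().getStatusCode();
            if (status == 200) {
                client.writeTo(out); // stream the body directly to the target
            }
        } finally {
            client.finish(); // always release the connection
        }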


