Package org.elasticsearch.common

Examples of org.elasticsearch.common.StopWatch$TaskInfo
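All of the usages below share one lap-timing idiom: create the watch with new StopWatch().start(), call stop() after a batch to close the current lap and read it back via lastTaskTime(), reopen the next lap with start(), and finally report totalTime(), whose secondsFrac()/millisFrac() accessors turn the elapsed time into a throughput figure. Here is a minimal sketch of that idiom, assuming a hypothetical doBatch() workload standing in for the bulk-indexing and UUID-generation bodies in the snippets below:

import org.elasticsearch.common.StopWatch;

public class StopWatchLapSketch {

    private static final int BATCHES = 100;

    public static void main(String[] args) {
        StopWatch stopWatch = new StopWatch().start();
        for (int i = 1; i <= BATCHES; i++) {
            doBatch(); // hypothetical workload, not taken from the examples below
            if (i % 10 == 0) {
                // stop() closes the current lap; lastTaskTime() reports only that lap
                System.out.println("--> batch " + i + " took " + stopWatch.stop().lastTaskTime());
                stopWatch.start(); // open the next lap
            }
        }
        // totalTime() sums the completed laps; secondsFrac() gives fractional seconds for a TPS figure
        System.out.println("--> total " + stopWatch.totalTime()
                + ", TPS " + (BATCHES / stopWatch.totalTime().secondsFrac()));
    }

    private static void doBatch() {
        // stand-in for the per-batch work (bulk indexing, UUID generation, ...)
    }
}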


                } while (!added);
            }
            String[] sValues = uniqueTerms.toArray(String.class);
            uniqueTerms = null;

            StopWatch stopWatch = new StopWatch().start();

            System.out.println("--> Indexing [" + COUNT + "] ...");
            long ITERS = COUNT / BATCH;
            long i = 1;
            int counter = 0;
            for (; i <= ITERS; i++) {
                BulkRequestBuilder request = client.prepareBulk();
                for (int j = 0; j < BATCH; j++) {
                    counter++;

                    XContentBuilder builder = jsonBuilder().startObject();
                    builder.field("id", Integer.toString(counter));
                    final String sValue = sValues[ThreadLocalRandom.current().nextInt(sValues.length)];
                    final long lValue = ThreadLocalRandom.current().nextInt(NUMBER_OF_TERMS);
                    builder.field("s_value", sValue);
                    builder.field("l_value", lValue);
                    builder.field("s_value_dv", sValue);
                    builder.field("l_value_dv", lValue);

                    for (String field : new String[] {"sm_value", "sm_value_dv"}) {
                        builder.startArray(field);
                        for (int k = 0; k < NUMBER_OF_MULTI_VALUE_TERMS; k++) {
                            builder.value(sValues[ThreadLocalRandom.current().nextInt(sValues.length)]);
                        }
                        builder.endArray();
                    }

                    for (String field : new String[] {"lm_value", "lm_value_dv"}) {
                        builder.startArray(field);
                        for (int k = 0; k < NUMBER_OF_MULTI_VALUE_TERMS; k++) {
                            builder.value(ThreadLocalRandom.current().nextInt(NUMBER_OF_TERMS));
                        }
                        builder.endArray();
                    }

                    builder.endObject();

                    request.add(Requests.indexRequest("test").type("type1").id(Integer.toString(counter))
                            .source(builder));
                }
                BulkResponse response = request.execute().actionGet();
                if (response.hasFailures()) {
                    System.err.println("--> failures...");
                }
                if (((i * BATCH) % 10000) == 0) {
                    System.out.println("--> Indexed " + (i * BATCH) + " took " + stopWatch.stop().lastTaskTime());
                    stopWatch.start();
                }
            }
            System.out.println("--> Indexing took " + stopWatch.totalTime() + ", TPS " + (((double) (COUNT)) / stopWatch.totalTime().secondsFrac()));
        } catch (Exception e) {
            System.out.println("--> Index already exists, ignoring indexing phase, waiting for green");
            ClusterHealthResponse clusterHealthResponse = client.admin().cluster().prepareHealth().setWaitForYellowStatus().setTimeout("10m").execute().actionGet();
            if (clusterHealthResponse.isTimedOut()) {
                System.err.println("--> Timed out waiting for cluster health");


                                .endObject()
                            .endObject()
                        .endObject())
                    .execute().actionGet();

            StopWatch stopWatch = new StopWatch().start();

            System.out.println("--> Indexing [" + COUNT + "] ...");
            long iters = COUNT / BATCH;
            long i = 1;
            int counter = 0;
            for (; i <= iters; i++) {
                BulkRequestBuilder request = client.prepareBulk();
                for (int j = 0; j < BATCH; j++) {
                    counter++;
                    final long value = lValues[r.nextInt(lValues.length)];
                    XContentBuilder source = jsonBuilder().startObject()
                            .field("id", Integer.valueOf(counter))
                            .field("l_value", value)
                            .field("i_value", (int) value)
                            .field("s_value", (short) value)
                            .field("b_value", (byte) value)
                            .field("date", new Date())
                            .endObject();
                    request.add(Requests.indexRequest("test").type("type1").id(Integer.toString(counter))
                            .source(source));
                }
                BulkResponse response = request.execute().actionGet();
                if (response.hasFailures()) {
                    System.err.println("--> failures...");
                }
                if (((i * BATCH) % 10000) == 0) {
                    System.out.println("--> Indexed " + (i * BATCH) + " took " + stopWatch.stop().lastTaskTime());
                    stopWatch.start();
                }
            }
            client.admin().indices().prepareFlush("test").execute().actionGet();
            System.out.println("--> Indexing took " + stopWatch.totalTime() + ", TPS " + (((double) (COUNT)) / stopWatch.totalTime().secondsFrac()));
        } catch (Exception e) {
            System.out.println("--> Index already exists, ignoring indexing phase, waiting for green");
            ClusterHealthResponse clusterHealthResponse = client.admin().cluster().prepareHealth().setWaitForGreenStatus().setTimeout("10m").execute().actionGet();
            if (clusterHealthResponse.isTimedOut()) {
                System.err.println("--> Timed out waiting for cluster health");

                        .endObject()
                    .endObject().endObject().endObject())).actionGet();

            System.out.println("Indexing " + NUM_DOCS + " documents");

            StopWatch stopWatch = new StopWatch().start();
            for (int i = 0; i < NUM_DOCS; ) {
                BulkRequestBuilder request = client.prepareBulk();
                for (int j = 0; j < BATCH && i < NUM_DOCS; ++j) {
                    final int lowCard = RandomInts.randomInt(R, LOW_CARD);
                    final int highCard = RandomInts.randomInt(R, HIGH_CARD);
                    request.add(client.prepareIndex("index", "type", Integer.toString(i)).setSource("low_card_str_value", "str" + lowCard, "high_card_str_value", "str" + highCard, "low_card_num_value", lowCard , "high_card_num_value", highCard));
                    ++i;
                }
                BulkResponse response = request.execute().actionGet();
                if (response.hasFailures()) {
                    System.err.println("--> failures...");
                    System.err.println(response.buildFailureMessage());
                }
                if ((i % 100000) == 0) {
                    System.out.println("--> Indexed " + i + " took " + stopWatch.stop().lastTaskTime());
                    stopWatch.start();
                }
            }

            client.admin().indices().prepareRefresh("index").execute().actionGet();
        } catch (Exception e) {

        Thread.sleep(10000);
        try {
            client.admin().indices().create(createIndexRequest("test")).actionGet();

            StopWatch stopWatch = new StopWatch().start();

            System.out.println("--> Indexing [" + COUNT + "] ...");
            long ITERS = COUNT / BATCH;
            long i = 1;
            int counter = 0;
            long[] currentTimeInMillis1 = new long[]{System.currentTimeMillis()};
            long[] currentTimeInMillis2 = new long[]{System.currentTimeMillis()};
            long startTimeInMillis = currentTimeInMillis1[0];
            long averageMillisChange = TIME_PERIOD / COUNT * 2;
            long backwardSkew = Math.max(1, (long) (averageMillisChange * 0.1));
            long bigOutOfOrder = 1;
            for (; i <= ITERS; i++) {
                BulkRequestBuilder request = client.prepareBulk();
                for (int j = 0; j < BATCH; j++) {
                    counter++;

                    XContentBuilder builder = jsonBuilder().startObject();
                    builder.field("id", Integer.toString(counter));
                    // move forward in time and sometimes a little bit back (delayed delivery)
                    long diff = ThreadLocalRandom.current().nextLong(2 * averageMillisChange + 2 * backwardSkew) - backwardSkew;
                    long[] currentTime = counter % 2 == 0 ? currentTimeInMillis1 : currentTimeInMillis2;
                    currentTime[0] += diff;
                    if (ThreadLocalRandom.current().nextLong(100) <= bigOutOfOrder) {
                        builder.field("l_value", currentTime[0] - 60000); // 1m delays
                    } else {
                        builder.field("l_value", currentTime[0]);
                    }

                    builder.endObject();

                    request.add(Requests.indexRequest("test").type("type1").id(Integer.toString(counter))
                            .source(builder));
                }
                BulkResponse response = request.execute().actionGet();
                if (response.hasFailures()) {
                    System.err.println("--> failures...");
                }
                if (((i * BATCH) % 10000) == 0) {
                    System.out.println("--> Indexed " + (i * BATCH) + " took " + stopWatch.stop().lastTaskTime());
                    stopWatch.start();
                }
            }
            System.out.println("--> Indexing took " + stopWatch.totalTime() + ", TPS " + (((double) (COUNT)) / stopWatch.totalTime().secondsFrac()));
            System.out.println("Time range 1: " + (currentTimeInMillis1[0] - startTimeInMillis) / 1000.0 / 3600 + " hours");
            System.out.println("Time range 2: " + (currentTimeInMillis2[0] - startTimeInMillis) / 1000.0 / 3600 + " hours");
            System.out.println("--> optimizing index");
            client.admin().indices().prepareOptimize().setMaxNumSegments(1).get();
        } catch (IndexAlreadyExistsException e) {

    private static long NUMBER_OF_ITERATIONS = 10000;
    private static int NUMBER_OF_THREADS = 100;

    public static void main(String[] args) throws Exception {
        StopWatch stopWatch = new StopWatch().start();
        System.out.println("Running " + NUMBER_OF_ITERATIONS);
        for (long i = 0; i < NUMBER_OF_ITERATIONS; i++) {
            UUID.randomUUID().toString();
        }
        System.out.println("Generated in " + stopWatch.stop().totalTime() + " TP Millis " + (NUMBER_OF_ITERATIONS / stopWatch.totalTime().millisFrac()));

        System.out.println("Generating using " + NUMBER_OF_THREADS + " threads with " + NUMBER_OF_ITERATIONS + " iterations");
        final CountDownLatch latch = new CountDownLatch(NUMBER_OF_THREADS);
        Thread[] threads = new Thread[NUMBER_OF_THREADS];
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new Thread(new Runnable() {
                @Override
                public void run() {
                    for (long i = 0; i < NUMBER_OF_ITERATIONS; i++) {
                        UUID.randomUUID().toString();
                    }
                    latch.countDown();
                }
            });
        }
        stopWatch = new StopWatch().start();
        for (Thread thread : threads) {
            thread.start();
        }
        latch.await();
        stopWatch.stop();
        System.out.println("Generate in " + stopWatch.totalTime() + " TP Millis " + ((NUMBER_OF_ITERATIONS * NUMBER_OF_THREADS) / stopWatch.totalTime().millisFrac()));
    }

    private static boolean USE_NANO_TIME = false;
    private static long NUMBER_OF_ITERATIONS = 1000000;
    private static int NUMBER_OF_THREADS = 100;

    public static void main(String[] args) throws Exception {
        StopWatch stopWatch = new StopWatch().start();
        System.out.println("Running " + NUMBER_OF_ITERATIONS);
        for (long i = 0; i < NUMBER_OF_ITERATIONS; i++) {
            System.currentTimeMillis();
        }
        System.out.println("Took " + stopWatch.stop().totalTime() + " TP Millis " + (NUMBER_OF_ITERATIONS / stopWatch.totalTime().millisFrac()));

        System.out.println("Running using " + NUMBER_OF_THREADS + " threads with " + NUMBER_OF_ITERATIONS + " iterations");
        final CountDownLatch latch = new CountDownLatch(NUMBER_OF_THREADS);
        Thread[] threads = new Thread[NUMBER_OF_THREADS];
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new Thread(new Runnable() {
                @Override
                public void run() {
                    if (USE_NANO_TIME) {
                        for (long i = 0; i < NUMBER_OF_ITERATIONS; i++) {
                            System.nanoTime();
                        }
                    } else {
                        for (long i = 0; i < NUMBER_OF_ITERATIONS; i++) {
                            System.currentTimeMillis();
                        }
                    }
                    latch.countDown();
                }
            });
        }
        stopWatch = new StopWatch().start();
        for (Thread thread : threads) {
            thread.start();
        }
        latch.await();
        stopWatch.stop();
        System.out.println("Took " + stopWatch.totalTime() + " TP Millis " + ((NUMBER_OF_ITERATIONS * NUMBER_OF_THREADS) / stopWatch.totalTime().millisFrac()));
    }

                            .endObject())
                    .execute().actionGet();
            numbers[i - TERM_QUERIES] = i;
        }

        StopWatch stopWatch = new StopWatch().start();
        System.out.println("Percolating [" + COUNT + "] ...");
        for (i = 1; i <= COUNT; i++) {
            XContentBuilder source;
            int expectedMatches;
            if (i % 2 == 0) {
                source = source(Integer.toString(i), "value");
                expectedMatches = TERM_QUERIES;
            } else {
                int number = numbers[i % RANGE_QUERIES];
                source = source(Integer.toString(i), number);
                expectedMatches = 1;
            }
            PercolateResponse percolate = client.preparePercolate()
                    .setIndices("test").setDocumentType("type1")
                    .setSource(source)
                    .execute().actionGet();
            if (percolate.getMatches().length != expectedMatches) {
                System.err.println("No matching number of queries");
            }

            if ((i % 10000) == 0) {
                System.out.println("Percolated " + i + " took " + stopWatch.stop().lastTaskTime());
                stopWatch.start();
            }
        }
        System.out.println("Percolation took " + stopWatch.totalTime() + ", TPS " + (((double) COUNT) / stopWatch.totalTime().secondsFrac()));

        clientNode.close();
        for (Node node : nodes) {
            node.close();
        }

        logger.info("--> creating alias1 ");
        assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1")));
        TimeValue timeout = TimeValue.timeValueSeconds(2);
        logger.info("--> recreating alias1 ");
        StopWatch stopWatch = new StopWatch();
        stopWatch.start();
        assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1").setTimeout(timeout)));
        assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));

        logger.info("--> modifying alias1 to have a filter");
        stopWatch.start();
        assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1", termFilter("name", "foo")).setTimeout(timeout)));
        assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));

        logger.info("--> recreating alias1 with the same filter");
        stopWatch.start();
        assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1", termFilter("name", "foo")).setTimeout(timeout)));
        assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));

        logger.info("--> recreating alias1 with a different filter");
        stopWatch.start();
        assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1", termFilter("name", "bar")).setTimeout(timeout)));
        assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));

        logger.info("--> verify that filter was updated");
        AliasMetaData aliasMetaData = internalCluster().clusterService().state().metaData().aliases().get("alias1").get("test");
        assertThat(aliasMetaData.getFilter().toString(), equalTo("{\"term\":{\"name\":\"bar\"}}"));

        logger.info("--> deleting alias1");
        stopWatch.start();
        assertAcked((admin().indices().prepareAliases().removeAlias("test", "alias1").setTimeout(timeout)));
        assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));

       
    }

                    }
                }
            });
        }

        StopWatch stopWatch = new StopWatch().start();
        for (int i = 0; i < NUMBER_OF_CLIENTS; i++) {
            clients[i].start();
        }

        try {
            latch.await();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        stopWatch.stop();

        System.out.println("Ran [" + NUMBER_OF_CLIENTS + "], each with [" + NUMBER_OF_ITERATIONS + "] iterations, payload [" + payloadSize + "]: took [" + stopWatch.totalTime() + "], TPS: " + (NUMBER_OF_CLIENTS * NUMBER_OF_ITERATIONS) / stopWatch.totalTime().secondsFrac());

        clientTransportService.close();
        clientThreadPool.shutdownNow();

        serverTransportService.close();

    private static long NUMBER_OF_ITERATIONS = 10000000;
    private static int NUMBER_OF_THREADS = 100;

    public static void main(String[] args) throws Exception {
        final AtomicLong counter = new AtomicLong();
        StopWatch stopWatch = new StopWatch().start();
        System.out.println("Running " + NUMBER_OF_ITERATIONS);
        for (long i = 0; i < NUMBER_OF_ITERATIONS; i++) {
            counter.incrementAndGet();
        }
        System.out.println("Took " + stopWatch.stop().totalTime() + " TP Millis " + (NUMBER_OF_ITERATIONS / stopWatch.totalTime().millisFrac()));

        System.out.println("Running using " + NUMBER_OF_THREADS + " threads with " + NUMBER_OF_ITERATIONS + " iterations");
        final CountDownLatch latch = new CountDownLatch(NUMBER_OF_THREADS);
        Thread[] threads = new Thread[NUMBER_OF_THREADS];
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new Thread(new Runnable() {
                @Override
                public void run() {
                    for (long i = 0; i < NUMBER_OF_ITERATIONS; i++) {
                        counter.incrementAndGet();
                    }
                    latch.countDown();
                }
            });
        }
        stopWatch = new StopWatch().start();
        for (Thread thread : threads) {
            thread.start();
        }
        latch.await();
        stopWatch.stop();
        System.out.println("Took " + stopWatch.totalTime() + " TP Millis " + ((NUMBER_OF_ITERATIONS * NUMBER_OF_THREADS) / stopWatch.totalTime().millisFrac()));
    }
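None of the snippets above touch TaskInfo itself; they only read lastTaskTime() and totalTime(). If the class follows the Spring StopWatch it was adapted from, every start(name)/stop() pair is also recorded as a TaskInfo entry that can be read back after the run. The following sketch rests on that assumption: start(String), taskInfo(), getTaskName() and getTime() are presumed accessors and are not confirmed by the examples on this page.

import org.elasticsearch.common.StopWatch;

public class TaskInfoSketch {

    public static void main(String[] args) throws Exception {
        StopWatch stopWatch = new StopWatch();

        // Assumption: start(String) opens a named lap, mirroring Spring's StopWatch
        stopWatch.start("index");
        Thread.sleep(100); // stand-in for real work
        stopWatch.stop();

        stopWatch.start("refresh");
        Thread.sleep(50);
        stopWatch.stop();

        // Assumption: taskInfo() returns one TaskInfo per completed start/stop pair
        for (StopWatch.TaskInfo task : stopWatch.taskInfo()) {
            System.out.println(task.getTaskName() + " took " + task.getTime());
        }
    }
}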
