Package org.apache.hadoop.util

Examples of org.apache.hadoop.util.Tool
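org.apache.hadoop.util.Tool is the standard command-line contract for Hadoop programs: it extends Configurable and declares a single int run(String[] args) method. Implementations are usually launched through ToolRunner, which folds the generic Hadoop options (-conf, -D, -fs, -files, -libjars, -archives) into the Configuration before run() is called. A minimal sketch of that pattern (MyTool is a placeholder name, not a class from hadoop-common):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class MyTool extends Configured implements Tool {

  @Override
  public int run(String[] args) throws Exception {
    // Generic options have already been folded into getConf() by ToolRunner.
    Configuration conf = getConf();
    System.out.println("fs.defaultFS = " + conf.get("fs.defaultFS"));
    return 0;  // a non-zero return value signals failure to the caller
  }

  public static void main(String[] args) throws Exception {
    // ToolRunner parses the generic options out of args before calling run().
    System.exit(ToolRunner.run(new Configuration(), new MyTool(), args));
  }
}

The examples below show the same contract in different settings: an HDFS balancer test, command-line drivers that dispatch to sub-tools, the InputSampler tests, an HBase ImportTsv test, and a reflective launch of an optional LZO indexer tool.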


  private void runBalancerCli(Configuration conf,
      long totalUsedSpace, long totalCapacity) throws Exception {
    waitForHeartBeat(totalUsedSpace, totalCapacity, client, cluster);

    final String[] args = { "-policy", "datanode" };
    final Tool tool = new Cli();   
    tool.setConf(conf);
    final int r = tool.run(args); // start rebalancing
   
    assertEquals("Tools should exit 0 on success", 0, r);
    waitForHeartBeat(totalUsedSpace, totalCapacity, client, cluster);
    LOG.info("Rebalancing with default ctor.");
    waitForBalancer(totalUsedSpace, totalCapacity, client, cluster);
    // ... (rest of this example omitted)
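The same invocation can also go through ToolRunner, which performs the setConf()/run() sequence itself in addition to parsing generic options. A sketch, assuming it is placed in the org.apache.hadoop.hdfs.server.balancer package so the test-visible Cli class can be referenced:

package org.apache.hadoop.hdfs.server.balancer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.util.ToolRunner;

public class RunBalancerCli {
  public static void main(String[] args) throws Exception {
    Configuration conf = new HdfsConfiguration();
    // ToolRunner.run() calls tool.setConf(conf) and then tool.run(args),
    // mirroring the manual sequence in the test above.
    int r = ToolRunner.run(conf, new Balancer.Cli(), new String[] { "-policy", "datanode" });
    System.exit(r);
  }
}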


  }

  @Override
  public int runTestFromCommandLine() throws Exception {

    Tool tool = null;
    if (toRun.equals("Generator")) {
      tool = new Generator();
    } else if (toRun.equals("Verify")) {
      tool = new Verify();
    } else if (toRun.equals("Loop")) {
    // ... (rest of this example omitted)

  public int run(String[] args) throws Exception {
    //get the class, run with the conf
    if (args.length < 1) {
      return printUsage();
    }
    Tool tool = null;
    if (args[0].equals("Generator")) {
      tool = new Generator();
    } else if (args[0].equals("Verify")) {
      tool = new Verify();
    } else if (args[0].equals("Loop")) {
    // ... (rest of this example omitted)
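Both dispatch snippets above choose a Tool implementation from the first argument but are cut off before the chosen tool is executed; a driver like this typically strips the sub-command name and hands the remaining arguments to ToolRunner. A self-contained sketch of the pattern (Driver, GeneratorTool and VerifyTool are placeholder classes, not the test's real ones):

import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class Driver extends Configured implements Tool {

  // Stand-ins for the Generator/Verify tools selected by the snippets above.
  static class GeneratorTool extends Configured implements Tool {
    @Override public int run(String[] args) { System.out.println("generate"); return 0; }
  }

  static class VerifyTool extends Configured implements Tool {
    @Override public int run(String[] args) { System.out.println("verify"); return 0; }
  }

  @Override
  public int run(String[] args) throws Exception {
    if (args.length < 1) {
      System.err.println("Usage: Driver <Generator|Verify> [tool options]");
      return 2;
    }
    Tool tool;
    if (args[0].equals("Generator")) {
      tool = new GeneratorTool();
    } else if (args[0].equals("Verify")) {
      tool = new VerifyTool();
    } else {
      System.err.println("Unknown sub-command: " + args[0]);
      return 2;
    }
    // Drop the sub-command name and run the selected Tool with the remaining args.
    return ToolRunner.run(getConf(), tool, Arrays.copyOfRange(args, 1, args.length));
  }

  public static void main(String[] args) throws Exception {
    System.exit(ToolRunner.run(new Configuration(), new Driver(), args));
  }
}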

    }
  }

  @Test
  public void testIncorrectParameters() throws Exception {
    Tool tool = new InputSampler<Object,Object>(new Configuration());

    int result = tool.run(new String[] { "-r" });
    assertTrue(result != 0);

    result = tool.run(new String[] { "-r", "not-a-number" });
    assertTrue(result != 0);

    // more than one reducer is required:
    result = tool.run(new String[] { "-r", "1" });
    assertTrue(result != 0);

    try {
      result = tool.run(new String[] { "-inFormat", "java.lang.Object" });
      fail("ClassCastException expected");
    } catch (ClassCastException cce) {
      // expected
    }

    try {
      result = tool.run(new String[] { "-keyClass", "java.lang.Object" });
      fail("ClassCastException expected");
    } catch (ClassCastException cce) {
      // expected
    }

    result = tool.run(new String[] { "-splitSample", "1", });
    assertTrue(result != 0);

    result = tool.run(new String[] { "-splitRandom", "1.0", "2", "xxx" });
    assertTrue(result != 0);

    result = tool.run(new String[] { "-splitInterval", "yyy", "5" });
    assertTrue(result != 0);

    // not enough subsequent arguments:
    result = tool.run(new String[] { "-r", "2", "-splitInterval", "11.0f", "0", "input" });
    assertTrue(result != 0);
  }
    // ... (rest of this example omitted)

    assertTrue(result != 0);
  }

  @Test
  public void testSplitSample() throws Exception {
    Tool tool = new InputSampler<Object,Object>(new Configuration());
    int result = tool.run(new String[] { "-r", Integer.toString(NUM_REDUCES),
        "-splitSample", "10", "100",
        input1, input2, output });
    assertEquals(0, result);

    Object[] partitions = readPartitions(output);
    // ... (rest of this example omitted)

  }

  @Test
  @SuppressWarnings("unchecked")
  public void testSplitRandom() throws Exception {
    Tool tool = new InputSampler<Object,Object>(new Configuration());
    int result = tool.run(new String[] { "-r", Integer.toString(NUM_REDUCES),
        // Use 0.999 probability to reduce the flakiness of the test because
        // the test will fail if the number of samples is less than (number of reduces + 1).
        "-splitRandom", "0.999f", "20", "100",
        input1, input2, output });
    assertEquals(0, result);
    // ... (rest of this example omitted)

    assertArrayEquals(sortedPartitions, partitions);
  }

  @Test
  public void testSplitInterval() throws Exception {
    Tool tool = new InputSampler<Object,Object>(new Configuration());
    int result = tool.run(new String[] { "-r", Integer.toString(NUM_REDUCES),
        "-splitInterval", "0.5f", "0",
        input1, input2, output });
    assertEquals(0, result);
    Object[] partitions = readPartitions(output);
    assertArrayEquals(new LongWritable[] { new LongWritable(7L), new LongWritable(9L),
    // ... (rest of this example omitted)
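The InputSampler tests above exercise its command-line parsing directly. Outside a test, the same Tool is normally launched through ToolRunner with -r, one of the -split* sampling options, the input path(s) and, as the last argument, the file the computed partition boundaries are written to. A minimal sketch, assuming the org.apache.hadoop.mapreduce.lib.partition.InputSampler used here and hypothetical paths:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.lib.partition.InputSampler;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class SamplePartitions {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Tool sampler = new InputSampler<Object, Object>(conf);
    int exit = ToolRunner.run(conf, sampler, new String[] {
        "-r", "4",                      // number of reduces; must be greater than 1
        "-splitSample", "10", "100",    // numSamples and maxSplits for the sampler
        "hdfs:///data/input",           // hypothetical input path
        "hdfs:///data/_partitions" });  // hypothetical partition-file path (last argument)
    System.exit(exit);
  }
}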

        table
    };

    // run the job, complete the load.
    util.createTable(table, cf);
    Tool t = TestImportTsv.doMROnTableTest(util, cf, simple_tsv, args);
    doLoadIncrementalHFiles(hfiles, table);

    // validate post-conditions
    validateDeletedPartitionsFile(t.getConf());

    // clean up after ourselves.
    util.deleteTable(table);
    util.cleanupDataTestDirOnTestFS(table);
    LOG.info("testGenerateAndLoad completed successfully.");
    // ... (rest of this example omitted)
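Note that validateDeletedPartitionsFile(t.getConf()) works because Tool extends Configurable: after the run, the Configuration the job executed with can be read back from the returned Tool and used for post-condition checks.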


      String codec = options.getCompressionCodec();
      if (codec != null && (codec.equals(CodecMap.LZOP)
              || codec.equals(CodecMap.getCodecClassName(CodecMap.LZOP)))) {
        try {
          Tool tool = ReflectionUtils.newInstance(Class.
                  forName("com.hadoop.compression.lzo.DistributedLzoIndexer").
                  asSubclass(Tool.class), configuration);
          ToolRunner.run(configuration, tool,
              new String[] { finalPath.toString() });
        } catch (Exception ex) {
    // ... (rest of this example omitted)
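The snippet above loads com.hadoop.compression.lzo.DistributedLzoIndexer by name so that the LZO codec remains an optional dependency. The general shape of that pattern, as a self-contained sketch (ReflectiveToolLauncher and its launch method are placeholder names):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class ReflectiveToolLauncher {
  /** Loads a Tool implementation by class name and runs it through ToolRunner. */
  public static int launch(Configuration conf, String toolClassName, String... toolArgs)
      throws Exception {
    // Class.forName fails fast if the optional jar is not on the classpath.
    Class<? extends Tool> toolClass = Class.forName(toolClassName).asSubclass(Tool.class);
    // ReflectionUtils.newInstance also injects the Configuration when the class is Configurable.
    Tool tool = ReflectionUtils.newInstance(toolClass, conf);
    return ToolRunner.run(conf, tool, toolArgs);
  }

  public static void main(String[] args) throws Exception {
    // e.g. launch(conf, "com.hadoop.compression.lzo.DistributedLzoIndexer", pathToIndex)
    System.exit(launch(new Configuration(), args[0],
        java.util.Arrays.copyOfRange(args, 1, args.length)));
  }
}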
