Package org.apache.hadoop.util

Examples of org.apache.hadoop.util.ProgramDriver
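
ProgramDriver maps a short program name to a driver class so that many tools can ship in one jar and be selected by the first command-line argument; driver() looks up args[0] and invokes that class's main(String[]) by reflection with the remaining arguments, printing the registered names and descriptions when no (or an unknown) name is given. The excerpts below show how several projects (the Taming Text and Mahout command-line drivers, Hadoop's test and example jars, HBase, and Apache Hama) build on it. As a minimal, self-contained sketch (the driver and tool names here are hypothetical, not taken from any of those projects):

import org.apache.hadoop.util.ProgramDriver;

public class MyToolsDriver {

  /** Hypothetical tool; ProgramDriver locates and invokes its main(String[]) by reflection. */
  public static class HelloTool {
    public static void main(String[] args) {
      System.out.println("hello " + (args.length > 0 ? args[0] : "world"));
    }
  }

  public static void main(String[] args) {
    ProgramDriver pgd = new ProgramDriver();
    try {
      // Register each tool under a short name with a one-line description.
      pgd.addClass("hello", HelloTool.class, "Prints a greeting (hypothetical example tool).");
      // Dispatch on args[0]; a missing or unknown name prints the usage listing.
      pgd.driver(args);
    } catch (Throwable e) {
      e.printStackTrace();
    }
  }
}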


  private TamingTextDriver() {
  }

  public static void main(String[] args) throws Throwable {
    try {
      ProgramDriver programDriver = new ProgramDriver();
      Properties mainClasses = new Properties();
      InputStream propsStream = Thread.currentThread()
                                      .getContextClassLoader()
                                      .getResourceAsStream("driver.classes.props");

      mainClasses.load(propsStream);

      boolean foundShortName = false;
      for(Object key :  mainClasses.keySet()) {
        String keyString = (String) key;
        if(args.length > 0 && shortName(mainClasses.getProperty(keyString)).equals(args[0])) {
          foundShortName = true;
        }
        addClass(programDriver, keyString, mainClasses.getProperty(keyString));
      }
      if(args.length < 1 || args[0] == null || args[0].equals("-h") || args[0].equals("--help")) {
        programDriver.driver(args);
      }
      String progName = args[0];
      if(!foundShortName) {
        addClass(programDriver, progName, progName);
      }
      shift(args);

      InputStream defaultsStream = Thread.currentThread()
                                         .getContextClassLoader()
                                         .getResourceAsStream(progName + ".props");

      Properties mainProps = new Properties();
      if (defaultsStream != null) {
        mainProps.load(defaultsStream);
      } else { // no props file found; leave the defaults empty
        log.warn("No " + progName + ".props found on classpath, will use command-line arguments only");
      }
      Map<String,String[]> argMap = new HashMap<String,String[]>();
      int i=0;
      while(i<args.length && args[i] != null) {
        List<String> argValues = new ArrayList<String>();
        String arg = args[i];
        i++;
        if(arg.length() > 2 && arg.charAt(1) == 'D') { // '-Dkey=value' or '-Dkey=value1,value2,etc' case
          String[] argSplit = arg.split("=");
          arg = argSplit[0];
          if(argSplit.length == 2) {
            argValues.add(argSplit[1]);
          }
        } else {                                      // '-key [values]' or '--key [values]' case.
          while(i<args.length && args[i] != null) {
            if(args[i].length() > 0 && args[i].charAt(0) != '-') {
              argValues.add(args[i]);
              i++;
            } else {
              break;
            }
          }
        }
        argMap.put(arg, argValues.toArray(new String[argValues.size()]));
      }
      for (String key : mainProps.stringPropertyNames()) {
        String[] argNamePair = key.split("\\|");
        String shortArg = '-' + argNamePair[0].trim();
        String longArg = argNamePair.length < 2 ? null : "--" + argNamePair[1].trim();
        if(!argMap.containsKey(shortArg) && (longArg == null || !argMap.containsKey(longArg))) {
          // key has no "|longName" part: fall back to the short option name as the map key
          argMap.put(longArg == null ? shortArg : longArg, new String[] { mainProps.getProperty(key) });
        }
      }
      List<String> argsList = new ArrayList<String>();
      argsList.add(progName);
      for(String arg : argMap.keySet()) {
        if(arg.startsWith("-D")) { // "-Dkey": if a non-empty value was captured, rebuild the arg as "-Dkey=value"
          if(argMap.get(arg).length > 0 && !argMap.get(arg)[0].trim().isEmpty()) {
            arg += '=' + argMap.get(arg)[0].trim();
          }
        }
        argsList.add(arg);
        if(!arg.startsWith("-D")) {
          argsList.addAll(Arrays.asList(argMap.get(arg)));
        }
      }
      programDriver.driver(argsList.toArray(new String[argsList.size()]));
    } catch (Throwable e) {
      log.error("TamingTextDriver failed with args: " + Arrays.toString(args) + '\n' + e.getMessage());
      throw e;
    }
  }
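
The helper methods used above (shortName(), addClass(), shift()) and the log field are defined elsewhere in the same class and are not part of this excerpt. Rather than hard-coding addClass calls, the driver discovers its programs from a driver.classes.props resource; judging from the parsing code, each entry maps a fully qualified class name to a short name and a description separated by a colon. The exact file differs per project, so the two lines below are only a hypothetical illustration of the format:

org.example.text.ClusteringDriver = cluster : Clusters the input documents (hypothetical entry)
org.example.text.TrainClassifierDriver = trainclassifier : Trains a classifier (hypothetical entry)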


 
  /**
   * Entry point that registers the test programs so they can all be run from the test jar file.
   */
  public static void main(String argv[]){
    ProgramDriver pgd = new ProgramDriver();
    try {
      pgd.addClass("threadedmapbench", ThreadedMapBenchmark.class,
                   "A map/reduce benchmark that compares the performance " +
                   "of maps with multiple spills over maps with 1 spill");
      pgd.addClass("mrbench", MRBench.class, "A map/reduce benchmark that can create many small jobs");
      pgd.addClass("nnbench", NNBench.class, "A benchmark that stresses the namenode.");
      pgd.addClass("mapredtest", TestMapRed.class, "A map/reduce test check.");
      pgd.addClass("testfilesystem", TestFileSystem.class, "A test for FileSystem read/write.");
      pgd.addClass("testsequencefile", TestSequenceFile.class, "A test for flat files of binary key value pairs.");
      pgd.addClass("testsetfile", TestSetFile.class, "A test for flat files of binary key/value pairs.");
      pgd.addClass("testarrayfile", TestArrayFile.class, "A test for flat files of binary key/value pairs.");
      pgd.addClass("testrpc", TestRPC.class, "A test for rpc.");
      pgd.addClass("testipc", TestIPC.class, "A test for ipc.");
      pgd.addClass("testsequencefileinputformat", TestSequenceFileInputFormat.class, "A test for sequence file input format.");
      pgd.addClass("testtextinputformat", TestTextInputFormat.class, "A test for text input format.");
      pgd.addClass("TestDFSIO", TestDFSIO.class, "Distributed i/o benchmark.");
      pgd.addClass("DFSCIOTest", DFSCIOTest.class, "Distributed i/o benchmark of libhdfs.");
      pgd.addClass("DistributedFSCheck", DistributedFSCheck.class, "Distributed checkup of the file system consistency.");
      pgd.addClass("testmapredsort", SortValidator.class,
                   "A map/reduce program that validates the map-reduce framework's sort.");
      pgd.addClass("testbigmapoutput", BigMapOutput.class,
                   "A map/reduce program that works on a very big " +
                   "non-splittable file and does identity map/reduce");
      pgd.addClass("loadgen", GenericMRLoadGenerator.class, "Generic map/reduce load generator");
      pgd.addClass("filebench", FileBench.class, "Benchmark SequenceFile(Input|Output)Format (block,record compressed and uncompressed), Text(Input|Output)Format (compressed and uncompressed)");
      pgd.addClass("dfsthroughput", BenchmarkThroughput.class,
                   "measure hdfs throughput");
      pgd.addClass("MRReliabilityTest", ReliabilityTest.class,
          "A program that tests the reliability of the MR framework by " +
          "injecting faults/failures");
      pgd.addClass("minicluster", MiniHadoopClusterManager.class,
          "Single process HDFS and MR cluster.");
      pgd.driver(argv);
    } catch(Throwable e) {
      e.printStackTrace();
    }
  }

  private MahoutDriver() {
  }

  public static void main(String[] args) throws Throwable {
    try {
      ProgramDriver programDriver = new ProgramDriver();
      Properties mainClasses = new Properties();
      InputStream propsStream = Thread.currentThread()
                                      .getContextClassLoader()
                                      .getResourceAsStream("driver.classes.props");

      mainClasses.load(propsStream);

      boolean foundShortName = false;
      for(Object key :  mainClasses.keySet()) {
        String keyString = (String) key;
        if(args.length > 0 && shortName(mainClasses.getProperty(keyString)).equals(args[0])) {
          foundShortName = true;
        }
        addClass(programDriver, keyString, mainClasses.getProperty(keyString));
      }
      if(args.length < 1 || args[0] == null || args[0].equals("-h") || args[0].equals("--help")) {
        programDriver.driver(args);
      }
      String progName = args[0];
      if(!foundShortName) {
        addClass(programDriver, progName, progName);
      }
      shift(args);

      InputStream defaultsStream = Thread.currentThread()
                                         .getContextClassLoader()
                                         .getResourceAsStream(progName + ".props");

      Properties mainProps = new Properties();
      if (defaultsStream != null) {
        mainProps.load(defaultsStream);
      } else { // no props file found; leave the defaults empty
        log.warn("No " + progName + ".props found on classpath, will use command-line arguments only");
      }
      Map<String,String[]> argMap = new HashMap<String,String[]>();
      int i=0;
      while(i<args.length && args[i] != null) {
        List<String> argValues = new ArrayList<String>();
        String arg = args[i];
        i++;
        if(arg.length() > 2 && arg.charAt(1) == 'D') { // '-Dkey=value' or '-Dkey=value1,value2,etc' case
          String[] argSplit = arg.split("=");
          arg = argSplit[0];
          if(argSplit.length == 2) {
            argValues.add(argSplit[1]);
          }
        } else {                                      // '-key [values]' or '--key [values]' case.
          while(i<args.length && args[i] != null) {
            if(args[i].length() > 0 && args[i].charAt(0) != '-') {
              argValues.add(args[i]);
              i++;
            } else {
              break;
            }
          }
        }
        argMap.put(arg, argValues.toArray(new String[argValues.size()]));
      }
      for (String key : mainProps.stringPropertyNames()) {
        String[] argNamePair = key.split("\\|");
        String shortArg = '-' + argNamePair[0].trim();
        String longArg = argNamePair.length < 2 ? null : "--" + argNamePair[1].trim();
        if(!argMap.containsKey(shortArg) && (longArg == null || !argMap.containsKey(longArg))) {
          // key has no "|longName" part: fall back to the short option name as the map key
          argMap.put(longArg == null ? shortArg : longArg, new String[] { mainProps.getProperty(key) });
        }
      }
      List<String> argsList = new ArrayList<String>();
      argsList.add(progName);
      for(String arg : argMap.keySet()) {
        if(arg.startsWith("-D")) { // "-Dkey": if a non-empty value was captured, rebuild the arg as "-Dkey=value"
          if(argMap.get(arg).length > 0 && !argMap.get(arg)[0].trim().isEmpty()) {
            arg += '=' + argMap.get(arg)[0].trim();
          }
        }
        argsList.add(arg);
        if(!arg.startsWith("-D")) {
          argsList.addAll(Arrays.asList(argMap.get(arg)));
        }
      }
      programDriver.driver(argsList.toArray(new String[argsList.size()]));
    } catch (Throwable e) {
      log.error("MahoutDriver failed with args: " + Arrays.toString(args) + '\n' + e.getMessage());
      throw e;
    }
  }
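
MahoutDriver handles per-program defaults the same way as the Taming Text driver above: each key in <progName>.props is a short option name, optionally followed by '|' and a long name, and its value is appended to the argument list as "--longName value" unless the user already supplied -shortName or --longName. For example, with a hypothetical kmeans.props containing

  i|input = /tmp/input
  o|output = /tmp/output

an invocation such as "kmeans --input /data/docs" keeps the user's --input, adds --output /tmp/output from the defaults, and then hands the assembled argument list to ProgramDriver.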

 
  /**
   * Entry point that registers the test programs so they can all be run from the test jar file.
   */
  public static void main(String argv[]){
    ProgramDriver pgd = new ProgramDriver();
    try {
      pgd.addClass("mrbench", MRBench.class, "A map/reduce benchmark that can create many small jobs");
      pgd.addClass("nnbench", NNBench.class, "A benchmark that stresses the namenode.");
      pgd.addClass("mapredtest", TestMapRed.class, "A map/reduce test check.");
      pgd.addClass("clustertestdfs", ClusterTestDFS.class, "A pseudo distributed test for DFS.");
      pgd.addClass("testfilesystem", TestFileSystem.class, "A test for FileSystem read/write.");
      pgd.addClass("testsequencefile", TestSequenceFile.class, "A test for flat files of binary key value pairs.");
      pgd.addClass("testsetfile", TestSetFile.class, "A test for flat files of binary key/value pairs.");
      pgd.addClass("testarrayfile", TestArrayFile.class, "A test for flat files of binary key/value pairs.");
      pgd.addClass("testrpc", TestRPC.class, "A test for rpc.");
      pgd.addClass("testipc", TestIPC.class, "A test for ipc.");
      pgd.addClass("testsequencefileinputformat", TestSequenceFileInputFormat.class, "A test for sequence file input format.");
      pgd.addClass("testtextinputformat", TestTextInputFormat.class, "A test for text input format.");
      pgd.addClass("TestDFSIO", TestDFSIO.class, "Distributed i/o benchmark.");
      pgd.addClass("DFSCIOTest", DFSCIOTest.class, "Distributed i/o benchmark of libhdfs.");
      pgd.addClass("DistributedFSCheck", DistributedFSCheck.class, "Distributed checkup of the file system consistency.");
      pgd.addClass("testmapredsort", SortValidator.class,
                   "A map/reduce program that validates the map-reduce framework's sort.");
      pgd.driver(argv);
    } catch(Throwable e) {
      e.printStackTrace();
    }
  }

* @author Owen O'Malley
*/
public class ExampleDriver {
 
  public static void main(String argv[]){
    ProgramDriver pgd = new ProgramDriver();
    try {
      pgd.addClass("wordcount", WordCount.class,
                   "A map/reduce program that counts the words in the input files.");
      pgd.addClass("grep", Grep.class,
                   "A map/reduce program that counts the matches of a regex in the input.");
      pgd.addClass("randomwriter", RandomWriter.class,
                   "A map/reduce program that writes 10GB of random data per node.");
      pgd.addClass("sort", Sort.class, "A map/reduce program that sorts the data written by the random writer.");
      pgd.addClass("pi", PiEstimator.class, "A map/reduce program that estimates Pi using monte-carlo method.");
      pgd.driver(argv);
    }
    catch(Throwable e){
      e.printStackTrace();
    }
  }

* human-readable description.
*/
public class ExampleDriver {
 
  public static void main(String argv[]){
    ProgramDriver pgd = new ProgramDriver();
    try {
      pgd.addClass("wordcount", WordCount.class,
                   "A map/reduce program that counts the words in the input files.");
      pgd.addClass("aggregatewordcount", AggregateWordCount.class,
                   "An Aggregate based map/reduce program that counts the words in the input files.");
      pgd.addClass("grep", Grep.class,
                   "A map/reduce program that counts the matches of a regex in the input.");
      pgd.addClass("randomwriter", RandomWriter.class,
                   "A map/reduce program that writes 10GB of random data per node.");
      pgd.addClass("randomtextwriter", RandomTextWriter.class,
      "A map/reduce program that writes 10GB of random textual data per node.");
      pgd.addClass("sort", Sort.class, "A map/reduce program that sorts the data written by the random writer.");
      pgd.addClass("pi", PiEstimator.class, "A map/reduce program that estimates Pi using monte-carlo method.");
      pgd.addClass("pentomino", DistributedPentomino.class,
      "A map/reduce tile laying program to find solutions to pentomino problems.");
      pgd.addClass("sudoku", Sudoku.class, "A sudoku solver.");
      pgd.addClass("sleep", SleepJob.class, "A job that sleeps at each map and reduce task.");
      pgd.driver(argv);
    }
    catch(Throwable e){
      e.printStackTrace();
    }
  }
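
Both ExampleDriver variants above catch the Throwable, print its stack trace, and return normally, so the JVM still exits with status 0 on failure. Newer Hadoop releases add ProgramDriver.run(String[]), which returns an exit code while driver(String[]) is kept for compatibility; whether run() is available depends on the Hadoop version, so the following is only a hedged sketch of that pattern, using a hypothetical EchoTool rather than the real example classes:

import org.apache.hadoop.util.ProgramDriver;

public class ExitCodeDriver {

  /** Hypothetical tool used only for this sketch. */
  public static class EchoTool {
    public static void main(String[] args) {
      System.out.println(String.join(" ", args));
    }
  }

  public static void main(String[] argv) {
    int exitCode = -1;
    ProgramDriver pgd = new ProgramDriver();
    try {
      pgd.addClass("echo", EchoTool.class, "Echoes its arguments (hypothetical tool).");
      // run() returns -1 for a missing or unknown program name instead of exiting directly.
      exitCode = pgd.run(argv);
    } catch (Throwable e) {
      e.printStackTrace();
    }
    System.exit(exitCode);
  }
}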

 
  /**
   * Entry point that registers the test programs so they can all be run from the test jar file.
   */
  public static void main(String argv[]){
    ProgramDriver pgd = new ProgramDriver();
    try {
      pgd.addClass("mrbench", MRBench.class, "A map/reduce benchmark that can create many small jobs");
      pgd.addClass("nnbench", NNBench.class, "A benchmark that stresses the namenode.");
      pgd.addClass("mapredtest", TestMapRed.class, "A map/reduce test check.");
      pgd.addClass("clustertestdfs", ClusterTestDFS.class, "A pseudo distributed test for DFS.");
      pgd.addClass("testfilesystem", TestFileSystem.class, "A test for FileSystem read/write.");
      pgd.addClass("testsequencefile", TestSequenceFile.class, "A test for flat files of binary key value pairs.");
      pgd.addClass("testsetfile", TestSetFile.class, "A test for flat files of binary key/value pairs.");
      pgd.addClass("testarrayfile", TestArrayFile.class, "A test for flat files of binary key/value pairs.");
      pgd.addClass("testrpc", TestRPC.class, "A test for rpc.");
      pgd.addClass("testipc", TestIPC.class, "A test for ipc.");
      pgd.addClass("testsequencefileinputformat", TestSequenceFileInputFormat.class, "A test for sequence file input format.");
      pgd.addClass("testtextinputformat", TestTextInputFormat.class, "A test for text input format.");
      pgd.addClass("TestDFSIO", TestDFSIO.class, "Distributed i/o benchmark.");
      pgd.addClass("DFSCIOTest", DFSCIOTest.class, "Distributed i/o benchmark of libhdfs.");
      pgd.addClass("DistributedFSCheck", DistributedFSCheck.class, "Distributed checkup of the file system consistency.");
      pgd.addClass("testmapredsort", SortValidator.class,
                   "A map/reduce program that validates the map-reduce framework's sort.");
      pgd.addClass("testbigmapoutput", BigMapOutput.class,
                   "A map/reduce program that works on a very big " +
                   "non-splittable file and does identity map/reduce");
      pgd.driver(argv);
    } catch(Throwable e) {
      e.printStackTrace();
    }
  }

  /**
   * @param args
   * @throws Throwable
   */
  public static void main(String[] args) throws Throwable {
    ProgramDriver pgd = new ProgramDriver();
    pgd.addClass(RowCounter.NAME, RowCounter.class,
      "Count rows in HBase table");
    pgd.addClass(Export.NAME, Export.class, "Write table data to HDFS.");
    pgd.addClass(Import.NAME, Import.class, "Import data written by Export.");
    pgd.driver(args);
  }

import org.apache.hama.examples.util.SSSPTextToSeq;

public class ExampleDriver {

  public static void main(String[] args) {
    ProgramDriver pgd = new ProgramDriver();
    try {
      pgd.addClass("pi", PiEstimator.class, "Pi Estimator");
      pgd.addClass("sssp-text2seq", SSSPTextToSeq.class, "Generates SSSP input from textfile");
      pgd.addClass("sssp", ShortestPaths.class, "Single Shortest Path");
      pgd.addClass("cmb", CombineExample.class, "Combine");
      pgd.addClass("bench", RandBench.class, "Random Benchmark");
      pgd.addClass("pagerank-text2seq", PagerankTextToSeq.class, "Generates Pagerank input from textfile");
      pgd.addClass("pagerank", PageRank.class, "PageRank");
      pgd.driver(args);
    } catch (Throwable e) {
      e.printStackTrace();
    }
  }

  /**
   * @param args
   * @throws Throwable
   */
  public static void main(String[] args) throws Throwable {
    ProgramDriver pgd = new ProgramDriver();
    pgd.addClass(RowCounter.NAME, RowCounter.class,
      "Count rows in HBase table");
    pgd.addClass(HStoreFileToStoreFile.JOBNAME,
      HStoreFileToStoreFile.class,
      "Bulk convert 0.19 HStoreFiles to 0.20 StoreFiles");
    pgd.driver(args);
  }