Package: org.apache.hama

Examples of org.apache.hama.HamaConfiguration


      if (job.getConfiguration().getBoolean(
          Constants.ENABLE_RUNTIME_PARTITIONING, false)
          && job.getConfiguration().get(Constants.RUNTIME_PARTITIONING_CLASS) != null) {

        HamaConfiguration conf = new HamaConfiguration(job.getConfiguration());

        conf.setInt(Constants.RUNTIME_DESIRED_PEERS_COUNT, numTasks);
        if (job.getConfiguration().get(Constants.RUNTIME_PARTITIONING_DIR) != null) {
          conf.set(Constants.RUNTIME_PARTITIONING_DIR, job.getConfiguration()
              .get(Constants.RUNTIME_PARTITIONING_DIR));
        }

        conf.set(Constants.RUNTIME_PARTITIONING_CLASS,
            job.get(Constants.RUNTIME_PARTITIONING_CLASS));
        BSPJob partitioningJob = new BSPJob(conf);
        partitioningJob.setJobName("Runtime partitioning job for "
            + partitioningJob.getJobName());
        LOG.debug("partitioningJob input: "
            + partitioningJob.get(Constants.JOB_INPUT_DIR));
        partitioningJob.setInputFormat(job.getInputFormat().getClass());
        partitioningJob.setInputKeyClass(job.getInputKeyClass());
        partitioningJob.setInputValueClass(job.getInputValueClass());
        partitioningJob.setOutputFormat(NullOutputFormat.class);
        partitioningJob.setOutputKeyClass(NullWritable.class);
        partitioningJob.setOutputValueClass(NullWritable.class);
        partitioningJob.setBspClass(PartitioningRunner.class);
        partitioningJob.set("bsp.partitioning.runner.job", "true");
        partitioningJob.getConfiguration().setBoolean(
            Constants.ENABLE_RUNTIME_PARTITIONING, false);
        partitioningJob.setOutputPath(partitionDir);

        boolean isPartitioned = false;
        try {
          isPartitioned = partitioningJob.waitForCompletion(true);
        } catch (InterruptedException e) {
          LOG.error("Interrupted partitioning run-time.", e);
        } catch (ClassNotFoundException e) {
          LOG.error("Class not found error partitioning run-time.", e);
        }

        if (isPartitioned) {
          if (job.getConfiguration().get(Constants.RUNTIME_PARTITIONING_DIR) != null) {
            job.setInputPath(new Path(conf
                .get(Constants.RUNTIME_PARTITIONING_DIR)));
          } else {
            job.setInputPath(partitionDir);
          }
          job.setBoolean("input.has.partitioned", true);
View Full Code Here


    boolean submitJob = false;
    boolean getStatus = false;
    String submitJobFile = null;
    String jobid = null;

    HamaConfiguration conf = new HamaConfiguration(getConf());
    init(conf);

    if ("-list".equals(cmd)) {
      if (args.length != 1 && !(args.length == 2 && "all".equals(args[1]))) {
        displayUsage(cmd);
        return exitCode;
      }
      if (args.length == 2 && "all".equals(args[1])) {
        listAllJobs = true;
      } else {
        listJobs = true;
      }
    } else if ("-list-active-grooms".equals(cmd)) {
      if (args.length != 1) {
        displayUsage(cmd);
        return exitCode;
      }
      listActiveGrooms = true;
    } else if ("-submit".equals(cmd)) {
      if (args.length == 1) {
        displayUsage(cmd);
        return exitCode;
      }

      submitJob = true;
      submitJobFile = args[1];
    } else if ("-kill".equals(cmd)) {
      if (args.length == 1) {
        displayUsage(cmd);
        return exitCode;
      }
      killJob = true;
      jobid = args[1];

    } else if ("-status".equals(cmd)) {
      if (args.length != 2) {
        displayUsage(cmd);
        return exitCode;
      }
      jobid = args[1];
      getStatus = true;

      // TODO Later, below functions should be implemented
      // with the Fault Tolerant mechanism.
    } else if ("-list-attempt-ids".equals(cmd)) {
      System.out.println("This function is not implemented yet.");
      return exitCode;
    } else if ("-kill-task".equals(cmd)) {
      System.out.println("This function is not implemented yet.");
      return exitCode;
    } else if ("-fail-task".equals(cmd)) {
      System.out.println("This function is not implemented yet.");
      return exitCode;
    }

    BSPJobClient jc = new BSPJobClient(new HamaConfiguration());
    if (listJobs) {
      listJobs();
      exitCode = 0;
    } else if (listAllJobs) {
      listAllJobs();
      exitCode = 0;
    } else if (listActiveGrooms) {
      listActiveGrooms();
      exitCode = 0;
    } else if (submitJob) {
      HamaConfiguration tConf = new HamaConfiguration(new Path(submitJobFile));
      RunningJob job = jc.submitJob(new BSPJob(tConf));
      System.out.println("Created job " + job.getID().toString());
    } else if (killJob) {
      RunningJob job = jc.getJob(BSPJobID.forName(jobid));
      if (job == null) {
View Full Code Here

public class TestOffHeapVerticesInfo {

  @Test
  public void testOffHeapVerticesInfoLifeCycle() throws Exception {
    OffHeapVerticesInfo<Text, NullWritable, DoubleWritable> info = new OffHeapVerticesInfo<Text, NullWritable, DoubleWritable>();
    HamaConfiguration conf = new HamaConfiguration();
    conf.set(GraphJob.VERTEX_CLASS_ATTR, PageRankVertex.class.getName());
    conf.set(GraphJob.VERTEX_EDGE_VALUE_CLASS_ATTR,
        NullWritable.class.getName());
    conf.set(GraphJob.VERTEX_ID_CLASS_ATTR, Text.class.getName());
    conf.set(GraphJob.VERTEX_VALUE_CLASS_ATTR, DoubleWritable.class.getName());
    GraphJobRunner.<Text, NullWritable, DoubleWritable> initClasses(conf);
    TaskAttemptID attempt = new TaskAttemptID("123", 1, 1, 0);
    try {
      ArrayList<PageRankVertex> list = new ArrayList<PageRankVertex>();
View Full Code Here

  @Test
  public void testAdditionWithDefaults() throws Exception {
    OffHeapVerticesInfo<Text, NullWritable, DoubleWritable> verticesInfo =
            new OffHeapVerticesInfo<Text, NullWritable, DoubleWritable>();
    HamaConfiguration conf = new HamaConfiguration();
    verticesInfo.init(null, conf, null);
    Vertex<Text, NullWritable, DoubleWritable> vertex = new PageRankVertex();
    vertex.setVertexID(new Text("some-id"));
    verticesInfo.addVertex(vertex);
    assertTrue("added vertex could not be found in the cache", verticesInfo.skippingIterator().hasNext());
View Full Code Here

  @Test
  public void testMassiveAdditionWithDefaults() throws Exception {
    OffHeapVerticesInfo<Text, NullWritable, DoubleWritable> verticesInfo =
            new OffHeapVerticesInfo<Text, NullWritable, DoubleWritable>();
    HamaConfiguration conf = new HamaConfiguration();
    verticesInfo.init(null, conf, null);
    assertEquals("vertices info size should be 0 at startup", 0, verticesInfo.size());
    Random r = new Random();
    int i = 10000;
    for (int n = 0; n < i; n++) {
View Full Code Here

  public static void main(String[] args) throws IOException,
      InterruptedException, ClassNotFoundException {
    if (args.length < 2)
      printUsage();

    HamaConfiguration conf = new HamaConfiguration(new Configuration());
    GraphJob pageJob = createJob(args, conf);

    long startTime = System.currentTimeMillis();
    if (pageJob.waitForCompletion(true)) {
      System.out.println("Job Finished in "
View Full Code Here

public class TestDiskVerticesInfo extends TestCase {

  @Test
  public void testDiskVerticesInfoLifeCycle() throws Exception {
    DiskVerticesInfo<Text, NullWritable, DoubleWritable> info = new DiskVerticesInfo<Text, NullWritable, DoubleWritable>();
    HamaConfiguration conf = new HamaConfiguration();
    conf.set(GraphJob.VERTEX_CLASS_ATTR, PageRankVertex.class.getName());
    conf.set(GraphJob.VERTEX_EDGE_VALUE_CLASS_ATTR,
        NullWritable.class.getName());
    conf.set(GraphJob.VERTEX_ID_CLASS_ATTR, Text.class.getName());
    conf.set(GraphJob.VERTEX_VALUE_CLASS_ATTR, DoubleWritable.class.getName());
    GraphJobRunner.<Text, NullWritable, DoubleWritable> initClasses(conf);
    TaskAttemptID attempt = new TaskAttemptID("omg", 1, 1, 0);
    try {
      ArrayList<PageRankVertex> list = new ArrayList<PageRankVertex>();
View Full Code Here

          .println("Usage: <size n> <1/x density> <output path> <number of tasks>");
      System.exit(1);
    }

    // BSP job configuration
    HamaConfiguration conf = new HamaConfiguration();

    conf.setInt(SIZE_OF_MATRIX, Integer.parseInt(args[0]));
    conf.setInt(DENSITY, Integer.parseInt(args[1]));

    BSPJob bsp = new BSPJob(conf, CombineExample.class);
    // Set the job name
    bsp.setJobName("Random Symmetric Matrix Generator");
    bsp.setBspClass(SymmetricMatrixGenBSP.class);
View Full Code Here

      }
      matrix = transposed;
    }

    // Write matrix to DFS
    HamaConfiguration conf = new HamaConfiguration();
    SequenceFile.Writer writer = null;
    try {
      FileSystem fs = FileSystem.get(conf);
      // use PipesVectorWritable if specified
      if (usePipesVectorWritable) {
View Full Code Here

          .println("Usage: <size n> <max out-edges> <output path> <number of tasks>");
      System.exit(1);
    }

    // BSP job configuration
    HamaConfiguration conf = new HamaConfiguration();

    conf.setInt(SIZE_OF_MATRIX, Integer.parseInt(args[0]));
    conf.setInt(MAX_EDGES, Integer.parseInt(args[1]));

    BSPJob bsp = new BSPJob(conf, FastGraphGenBSP.class);
    // Set the job name
    bsp.setJobName("Random Fast Matrix Generator");
    bsp.setBspClass(FastGraphGenBSP.class);
View Full Code Here

TOP

Related Classes of org.apache.hama.HamaConfiguration

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc. Contact: coftware@gmail.com.