Package org.apache.hadoop.tools.rumen

Examples of org.apache.hadoop.tools.rumen.JobStoryProducer
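
JobStoryProducer is the Rumen interface that streams JobStory objects parsed from a job trace: getNextJob() returns the next job in the trace, or null once the trace is exhausted, and close() releases the underlying stream. ZombieJobProducer is the stock implementation, backed by a Rumen JSON trace file. A minimal read loop looks like the sketch below (assuming the trace file exists and the enclosing method declares throws IOException; the trace name is borrowed from the test snippets that follow):

    Configuration conf = new Configuration();
    JobStoryProducer producer = new ZombieJobProducer(
        new Path("wordcount2.json"), null, conf);
    try {
      JobStory job;
      // iterate until the trace is exhausted
      while ((job = producer.getNextJob()) != null) {
        System.out.println(job.getName() + ": " + job.getNumberMaps()
            + " maps, " + job.getNumberReduces() + " reduces");
      }
    } finally {
      producer.close();
    }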


Constructing a ZombieJobProducer over a JSON word-count trace inside a test, along with the latch, user resolver, and submitter the test uses to drive it:

    Configuration conf = new Configuration();
    File fin = new File("src" + File.separator + "test" + File.separator
            + "resources" + File.separator + "data" + File.separator
            + "wordcount2.json");
    // read a couple of jobs from wordcount2.json
    JobStoryProducer jobProducer = new ZombieJobProducer(new Path(
            fin.getAbsolutePath()), null, conf);
    CountDownLatch startFlag = new CountDownLatch(1);
    UserResolver resolver = new SubmitterUserResolver();
    FakeJobSubmitter submitter = new FakeJobSubmitter();
    File ws = new File("target" + File.separator + this.getClass().getName());
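
The null second argument to ZombieJobProducer is the ZombieCluster; with no cluster topology supplied, the produced jobs are not pinned to any particular cluster layout. FakeJobSubmitter is a test double standing in for Gridmix's real job-submission machinery.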

From a Gridmix test: DebugGridmix (a test subclass of Gridmix) creates JobStoryProducers over the same word-count trace three ways: from the gzipped trace file, from an expanded copy, and from standard input (a trace path of "-" means stdin):

    InputStream origStdIn = System.in;
    InputStream tmpIs = null;
    try {
      DebugGridmix dgm = new DebugGridmix();
      JobStoryProducer jsp
        = dgm.createJobStoryProducer(inputFile.toString(), conf);

      System.out.println("Verifying JobStory from compressed trace...");
      verifyWordCountJobStory(jsp.getNextJob());

      expandGzippedTrace(lfs, inputFile, tempFile);
      jsp = dgm.createJobStoryProducer(tempFile.toString(), conf);
      System.out.println("Verifying JobStory from uncompressed trace...");
      verifyWordCountJobStory(jsp.getNextJob());

      tmpIs = lfs.open(tempFile);
      System.setIn(tmpIs);
      System.out.println("Verifying JobStory from trace in standard input...");
      jsp = dgm.createJobStoryProducer("-", conf);
      verifyWordCountJobStory(jsp.getNextJob());
    } finally {
      System.setIn(origStdIn);
      if (tmpIs != null) {
        tmpIs.close();
      }
    }
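
expandGzippedTrace and verifyWordCountJobStory are helpers defined elsewhere in this test. As a rough sketch of the expansion step (the helper name and FileSystem-based signature come from the call above; the body is an assumption, not the test's actual code):

    // Hypothetical sketch: copy a gzipped trace into a flat file so the
    // producer can read it uncompressed. lfs is a local FileSystem.
    // imports: java.util.zip.GZIPInputStream, org.apache.hadoop.io.IOUtils
    static void expandGzippedTrace(FileSystem lfs, Path gzipped, Path expanded)
        throws IOException {
      try (InputStream in = new GZIPInputStream(lfs.open(gzipped));
           OutputStream out = lfs.create(expanded)) {
        IOUtils.copyBytes(in, out, 4096);
      }
    }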

From Gridmix's distributed-cache emulation setup: a JobStoryProducer enumerates the simulated jobs in the trace so that their distributed-cache files can be generated up front:

    distCacheEmulator.init(traceIn, factory.jobCreator, generate);
    int exitCode = 0;
    if (distCacheEmulator.shouldGenerateDistCacheData() ||
        distCacheEmulator.shouldEmulateDistCacheLoad()) {

      JobStoryProducer jsp = createJobStoryProducer(traceIn, conf);
      exitCode = distCacheEmulator.setupGenerateDistCacheData(jsp);
      if (exitCode == 0) {
        // If there are files to be generated, run a MapReduce job to generate
        // the distributed-cache files of all the simulated jobs in this trace.
        writeDistCacheData(conf);
      }
    }
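
createJobStoryProducer is Gridmix's factory method for turning a trace argument into a producer. A sketch consistent with its behavior in the snippets on this page, where a trace path of "-" means standard input (treat the body as an assumption, not the exact Gridmix source):

    // Sketch: map a trace string to a JobStoryProducer.
    protected JobStoryProducer createJobStoryProducer(String traceIn,
        Configuration conf) throws IOException {
      if ("-".equals(traceIn)) {
        // trace arrives on standard input
        return new ZombieJobProducer(System.in, null);
      }
      return new ZombieJobProducer(new Path(traceIn), null, conf);
    }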

From the MapReduce simulator's initialization: task trackers are started from the cluster topology, and a SimulatorJobStoryProducer replays the trace starting at firstJobStartTime, using a sub-seed derived from the master random seed:

    // create task trackers (TTs) based on topology.json
    long firstJobStartTime = startTaskTrackers(cluster, jobConf, now);

    long subRandomSeed = RandomSeedGenerator.getSeed("forSimulatorJobStoryProducer",
                                                     masterRandomSeed);
    JobStoryProducer jobStoryProducer = new SimulatorJobStoryProducer(
        new Path(traceFile), cluster, firstJobStartTime, jobConf, subRandomSeed);

    final SimulatorJobSubmissionPolicy submissionPolicy = SimulatorJobSubmissionPolicy
        .getPolicy(jobConf);
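
RandomSeedGenerator.getSeed derives a stable sub-seed from a master seed plus a string tag, so each simulator component gets its own random stream while the whole run stays reproducible from one master seed. For example (the second tag below is made up for illustration):

    long masterRandomSeed = 42L;
    // Same tag and master seed always produce the same sub-seed;
    // different tags produce independent-looking seeds.
    long producerSeed = RandomSeedGenerator.getSeed(
        "forSimulatorJobStoryProducer", masterRandomSeed);
    long otherSeed = RandomSeedGenerator.getSeed(
        "forHypotheticalComponent", masterRandomSeed);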

A variant of the same trace-verification test that reports progress through a logger instead of System.out:

    InputStream origStdIn = System.in;
    InputStream tmpIs = null;
    try {
      DebugGridmix dgm = new DebugGridmix();
      JobStoryProducer jsp = dgm.createJobStoryProducer(inputFile.toString(),
              conf);

      LOG.info("Verifying JobStory from compressed trace...");
      verifyWordCountJobStory(jsp.getNextJob());

      expandGzippedTrace(lfs, inputFile, tempFile);
      jsp = dgm.createJobStoryProducer(tempFile.toString(), conf);
      LOG.info("Verifying JobStory from uncompressed trace...");
      verifyWordCountJobStory(jsp.getNextJob());

      tmpIs = lfs.open(tempFile);
      System.setIn(tmpIs);
      LOG.info("Verifying JobStory from trace in standard input...");
      jsp = dgm.createJobStoryProducer("-", conf);
      verifyWordCountJobStory(jsp.getNextJob());
    } finally {
      System.setIn(origStdIn);
      if (tmpIs != null) {
        tmpIs.close();
      }
    }