Package co.cask.cdap.app.program

Examples of co.cask.cdap.app.program.Program
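
The snippets below show the recurring pattern: obtain a Program from a jar Location via Programs.create (or Programs.createWithUnpack when the jar must be expanded first), then query it for its name, main class, jar location, and ApplicationSpecification. A minimal, self-contained sketch of that pattern (the LocalLocationFactory root and jar name are illustrative assumptions, not taken from the snippets):

import java.io.File;

import org.apache.twill.filesystem.LocalLocationFactory;
import org.apache.twill.filesystem.Location;
import org.apache.twill.filesystem.LocationFactory;

import co.cask.cdap.app.program.Program;
import co.cask.cdap.app.program.Programs;

public final class ProgramLoadSketch {
  public static void main(String[] args) throws Exception {
    // Resolve the program jar through a LocationFactory, as the snippets below do.
    LocationFactory lf = new LocalLocationFactory(new File("/tmp"));  // illustrative root
    Location jarLocation = lf.create("my-program.jar");               // hypothetical jar name

    // Programs.create wraps the jar as a Program without unpacking it.
    Program program = Programs.create(jarLocation);
    System.out.println("Program name: " + program.getName());
    System.out.println("Main class:   " + program.getMainClassName());
    System.out.println("Jar location: " + program.getJarLocation().toURI());
  }
}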


      // Create a unique temp directory for unpacking this run's program jar.
      final File destinationUnpackedJarDir =
        new File(tmpDir, String.format("%s.%s", program.getName(), UUID.randomUUID().toString()));
      Preconditions.checkState(!destinationUnpackedJarDir.exists());
      destinationUnpackedJarDir.mkdirs();

      Program bundleJarProgram = Programs.createWithUnpack(program.getJarLocation(), destinationUnpackedJarDir);
      RuntimeInfo info = super.run(bundleJarProgram, options);
      info.getController().addListener(new AbstractListener() {
        @Override
        public void stopped() {
          try {
          // ...
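
The stopped() callback above is cut off; from the context, its job is to remove the per-run unpack directory once the program finishes. A self-contained helper of the kind such a callback might call (hypothetical; not part of the CDAP API):

import java.io.File;
import java.io.IOException;

final class TempDirCleanup {
  // Recursively delete a directory tree, e.g. the per-run unpack
  // directory, once the program controller reports "stopped".
  static void deleteRecursively(File file) throws IOException {
    File[] children = file.listFiles();
    if (children != null) {
      for (File child : children) {
        deleteRecursively(child);
      }
    }
    if (!file.delete()) {
      throw new IOException("Failed to delete " + file);
    }
  }
}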


      // Refuse to start the program if it is already running.
      ProgramRuntimeService.RuntimeInfo existingRuntimeInfo = findRuntimeInfo(id, type);
      if (existingRuntimeInfo != null) {
        return AppFabricServiceStatus.PROGRAM_ALREADY_RUNNING;
      }

      // Load the program from the store; fail fast if it does not exist.
      Program program = store.loadProgram(id, type);
      if (program == null) {
        return AppFabricServiceStatus.PROGRAM_NOT_FOUND;
      }

      // Pick up any runtime arguments previously saved for this program.
      Map<String, String> userArgs = store.getRunArguments(id);
      // ...

  private void replaceAppSpecInProgramJar(Id.Program id, ApplicationSpecification appSpec, ProgramType type) {
    try {
      Location programLocation = getProgramLocation(id, type);
      ArchiveBundler bundler = new ArchiveBundler(programLocation);

      Program program = Programs.create(programLocation);
      String className = program.getMainClassName();

      Location tmpProgramLocation = programLocation.getTempFile("");
      try {
        ProgramBundle.create(id.getApplication(), bundler, tmpProgramLocation, id.getId(), className, type, appSpec);
        // ...
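
The method writes the rebuilt bundle to a temporary Location before touching the original jar. The same write-then-swap idea in plain java.nio, for illustration (paths are hypothetical):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

final class SwapInPlace {
  // Build the new artifact at a temporary path, then move it over the
  // original so readers never observe a half-written jar.
  static void swap(Path tmpJar, Path targetJar) throws IOException {
    Files.move(tmpJar, targetJar, StandardCopyOption.REPLACE_EXISTING);
  }
}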

        } else {
          LOG.error("Unknown location factory specified");
          return -1;
        }

        Program archive = Programs.createWithUnpack(lf.create(jarFilename), unpackedJarDir);
        Object appMain = archive.getMainClass().newInstance();
        if (!(appMain instanceof Application)) {
          LOG.error(String.format("Application main class is of invalid type: %s",
                                  appMain.getClass().getName()));
          return -1;
        }
        // ...
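
The load-instantiate-typecheck sequence above is a generic pattern. A stripped-down, self-contained version (the Application interface here is a stand-in defined only so the example compiles, not CDAP's):

final class MainClassCheck {
  // Stand-in for co.cask.cdap.api.app.Application, for illustration only.
  interface Application { }

  static Application instantiate(ClassLoader cl, String className) throws Exception {
    Object main = cl.loadClass(className).newInstance();
    if (!(main instanceof Application)) {
      throw new IllegalArgumentException(
          "Application main class is of invalid type: " + main.getClass().getName());
    }
    return (Application) main;
  }
}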

    return ImmutableMap.copyOf(result);
  }

  private RuntimeInfo createRuntimeInfo(ProgramType type, Id.Program programId, TwillController controller) {
    try {
      Program program = store.loadProgram(programId, type);
      Preconditions.checkNotNull(program, "Program not found");

      ProgramController programController = createController(program, controller);
      return programController == null ? null : new SimpleRuntimeInfo(programController, type, programId);
    } catch (Exception e) {
      // ...

  @Override
  public final ProgramController run(final Program program, final ProgramOptions options) {
    final File hConfFile;
    final File cConfFile;
    final Program copiedProgram;
    final File programDir;    // Temp directory for unpacking the program

    try {
      // Copy the config files and program jar to local temp, and ask Twill to localize them to the container.
      // Twill saves these files in HDFS and reuses them for the lifetime of the application;
      // it also manages their cleanup in HDFS.
      hConfFile = saveHConf(hConf, File.createTempFile("hConf", ".xml"));
      cConfFile = saveCConf(cConf, File.createTempFile("cConf", ".xml"));
      programDir = Files.createTempDir();
      copiedProgram = copyProgramJar(program, programDir);
    } catch (IOException e) {
      throw Throwables.propagate(e);
    }

    final String runtimeArgs = new Gson().toJson(options.getUserArguments());

    // Obtain and add the HBase delegation token as well (a no-op in non-secure mode).
    // Twill also ignores it when not running in secure mode.
    // The HDFS token should already be obtained by Twill.
    return launch(copiedProgram, options, hConfFile, cConfFile, new ApplicationLauncher() {
      @Override
      public TwillController launch(TwillApplication twillApplication) {
        TwillPreparer twillPreparer = twillRunner
          .prepare(twillApplication);
        if (options.isDebug()) {
          LOG.info("Starting {} with debugging enabled.", program.getId());
          twillPreparer.enableDebugging();
        }
        TwillController twillController = twillPreparer
          .withDependencies(new HBaseTableUtilFactory().get().getClass())
          .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out)))
          .addSecureStore(YarnSecureStore.create(HBaseTokenUtils.obtainToken(hConf, new Credentials())))
          .withApplicationArguments(
            String.format("--%s", RunnableOptions.JAR), copiedProgram.getJarLocation().getName(),
            String.format("--%s", RunnableOptions.RUNTIME_ARGS), runtimeArgs
          ).start();
        return addCleanupListener(twillController, hConfFile, cConfFile, copiedProgram, programDir);
      }
    });
    // ...
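
saveHConf and saveCConf are not shown; presumably they serialize the Hadoop and CDAP configurations to the XML files that Twill localizes. A sketch of the Hadoop side under that assumption:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import org.apache.hadoop.conf.Configuration;

final class ConfSaver {
  // Serialize a Hadoop Configuration to an XML file so Twill can
  // localize it into each container (assumed behavior of saveHConf).
  static File save(Configuration conf, File file) throws IOException {
    OutputStream out = new FileOutputStream(file);
    try {
      conf.writeXml(out);
    } finally {
      out.close();
    }
    return file;
  }
}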

                                     @Nullable String outputDataSetName) {
    Injector injector = prepare();

    // Initializing Program
    LocationFactory locationFactory = injector.getInstance(LocationFactory.class);
    Program program;
    try {
      program = Programs.create(locationFactory.create(programLocation), classLoader);
      // If this program was launched from a Workflow, wrap it in the workflow-aware variant.
      if (workflowBatch != null) {
        MapReduceSpecification mapReduceSpec = program.getSpecification().getMapReduce().get(workflowBatch);
        Preconditions.checkArgument(mapReduceSpec != null, "Cannot find MapReduceSpecification for %s", workflowBatch);
        program = new WorkflowMapReduceProgram(program, mapReduceSpec);
      }
    } catch (IOException e) {
      LOG.error("Could not init Program based on location: " + programLocation);
      throw Throwables.propagate(e);
    }

    // Initialize the dataset context and hook it up to the MapReduce job transaction

    DatasetFramework datasetFramework = injector.getInstance(DatasetFramework.class);
    CConfiguration configuration = injector.getInstance(CConfiguration.class);

    ApplicationSpecification programSpec = program.getSpecification();

    // if this is not for a mapper or a reducer, we don't need the metrics collection service
    MetricsCollectionService metricsCollectionService =
      (type == null) ? null : injector.getInstance(MetricsCollectionService.class);

    DiscoveryServiceClient discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);

    // Creating mapreduce job context
    MapReduceSpecification spec = program.getSpecification().getMapReduce().get(program.getName());
    BasicMapReduceContext context =
      new BasicMapReduceContext(program, type, RunIds.fromString(runId),
                                runtimeArguments, programSpec.getDatasets().keySet(), spec, logicalStartTime,
                                workflowBatch, discoveryServiceClient, metricsCollectionService,
                                datasetFramework, configuration);
    // ...
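
Both the MapReduce context above and the Spark context further below fetch framework services from a Guice Injector. The lookup pattern, self-contained (the service and binding are illustrative):

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;

final class InjectorSketch {
  interface MetricsService { }                          // illustrative service
  static final class NoopMetrics implements MetricsService { }

  public static void main(String[] args) {
    Injector injector = Guice.createInjector(new AbstractModule() {
      @Override
      protected void configure() {
        bind(MetricsService.class).to(NoopMetrics.class);
      }
    });
    // Same style as injector.getInstance(DatasetFramework.class) above.
    MetricsService metrics = injector.getInstance(MetricsService.class);
    System.out.println("Got " + metrics.getClass().getSimpleName());
  }
}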

    ProgramRuntimeService.RuntimeInfo existingRuntimeInfo = findRuntimeInfo(programId, programType);
    if (existingRuntimeInfo != null) {
      throw new JobExecutionException(UserMessages.getMessage(UserErrors.ALREADY_RUNNING), false);
    }
    Map<String, String> userArgs;
    Program program;
    try {
      program = store.loadProgram(programId, ProgramType.WORKFLOW);
      Preconditions.checkNotNull(program, "Program not found");

      userArgs = store.getRunArguments(programId);
      // ...

                                 Transaction tx, ClassLoader classLoader, URI programLocation) {
    Injector injector = prepare();

    // Initializing Program
    LocationFactory locationFactory = injector.getInstance(LocationFactory.class);
    Program program;
    try {
      program = Programs.create(locationFactory.create(programLocation), classLoader);
      //TODO: This should be changed when we support Spark in Workflow
    } catch (IOException e) {
      LOG.error("Could not init Program based on location: " + programLocation);
      throw Throwables.propagate(e);
    }

    // Initialize the dataset context and hook it up to the Spark job transaction

    DatasetFramework datasetFramework = injector.getInstance(DatasetFramework.class);
    CConfiguration configuration = injector.getInstance(CConfiguration.class);

    ApplicationSpecification appSpec = program.getSpecification();

    //TODO: Change this when Spark starts supporting Metrics
    MetricsCollectionService metricsCollectionService = null;

    DiscoveryServiceClient discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);

    // Creating Spark job context
    SparkSpecification sparkSpec = program.getSpecification().getSpark().get(program.getName());
    BasicSparkContext context =
      new BasicSparkContext(program, RunIds.fromString(runId), runtimeArguments, appSpec.getDatasets().keySet(),
                            sparkSpec, logicalStartTime, workflowBatch, metricsCollectionService,
                            datasetFramework, configuration, discoveryServiceClient);
    // ...

    final MapReduceSpecification mapReduceSpec = workflowSpec.getMapReduce().get(name);
    Preconditions.checkArgument(mapReduceSpec != null,
                                "No MapReduce with name %s found in Workflow %s", name, workflowSpec.getName());

    final Program mapReduceProgram = new WorkflowMapReduceProgram(workflowProgram, mapReduceSpec);
    final ProgramOptions options = new SimpleProgramOptions(
      mapReduceProgram.getName(),
      new BasicArguments(ImmutableMap.of(
        ProgramOptionConstants.RUN_ID, runId.getId(),
        ProgramOptionConstants.LOGICAL_START_TIME, Long.toString(logicalStartTime),
        ProgramOptionConstants.WORKFLOW_BATCH, name
      )),
      // ...
