Package co.cask.cdap.app

Examples of co.cask.cdap.app.ApplicationSpecification
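Every runner excerpted below opens the same way: fetch the ApplicationSpecification from the Program, null-check it, verify the ProgramType, then look up the type-specific specification by program name. As a sketch, that shared shape could be factored into a helper like this (the helper itself is hypothetical, not CDAP API; the getters mentioned in the comment all appear in the excerpts below):

  // Hypothetical helper distilling the validation pattern shared by the run() methods below.
  private static void checkProgramType(Program program, ProgramType expected) {
    Preconditions.checkNotNull(program.getSpecification(), "Missing application specification.");
    Preconditions.checkNotNull(program.getType(), "Missing processor type.");
    Preconditions.checkArgument(program.getType() == expected, "Only %s process type is supported.", expected);
  }

  // Usage sketch. Each program type has its own map on ApplicationSpecification:
  // getSpark(), getProcedures(), getFlows(), getMapReduce(), getWorkflows(), getServices().
  checkProgramType(program, ProgramType.SPARK);
  SparkSpecification spec = program.getSpecification().getSpark().get(program.getName());
  Preconditions.checkNotNull(spec, "Missing SparkSpecification for %s", program.getName());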



  @Override
  public ProgramController run(Program program, ProgramOptions options) {
    // Extract and verify parameters
    final ApplicationSpecification appSpec = program.getSpecification();
    Preconditions.checkNotNull(appSpec, "Missing application specification.");

    ProgramType processorType = program.getType();
    Preconditions.checkNotNull(processorType, "Missing processor type.");
    Preconditions.checkArgument(processorType == ProgramType.SPARK, "Only Spark process type is supported.");

    final SparkSpecification spec = appSpec.getSpark().get(program.getName());
    Preconditions.checkNotNull(spec, "Missing SparkSpecification for %s", program.getName());

    // Optionally get the runId. If the Spark program was started by another program (e.g. a Workflow), it inherits that runId.
    Arguments arguments = options.getArguments();
    RunId runId = arguments.hasOption(ProgramOptionConstants.RUN_ID)
      ? RunIds.fromString(arguments.getOption(ProgramOptionConstants.RUN_ID))
      : RunIds.generate();
View Full Code Here
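The runId logic above is the inheritance idiom: when a parent program such as a Workflow launches this Spark program, it passes its own runId through the program arguments, and the child reuses it instead of generating a fresh one, so the two runs can be correlated. A sketch of the parent side, with the argument-map construction being illustrative:

  // Illustrative parent side: pass the parent's runId down so the child run shares it.
  Map<String, String> childArgs = ImmutableMap.of(ProgramOptionConstants.RUN_ID, parentRunId.getId());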


  @Override
  public ProgramController run(Program program, ProgramOptions options) {
    try {
      // Extract and verify parameters
      ApplicationSpecification appSpec = program.getSpecification();
      Preconditions.checkNotNull(appSpec, "Missing application specification.");

      ProgramType processorType = program.getType();
      Preconditions.checkNotNull(processorType, "Missing processor type.");
      Preconditions.checkArgument(processorType == ProgramType.PROCEDURE, "Only PROCEDURE process type is supported.");

      ProcedureSpecification procedureSpec = appSpec.getProcedures().get(program.getName());
      Preconditions.checkNotNull(procedureSpec, "Missing ProcedureSpecification for %s", program.getName());

      int instanceId = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCE_ID, "0"));

      int instanceCount = appSpec.getProcedures().get(program.getName()).getInstances();
      Preconditions.checkArgument(instanceCount > 0, "Invalid or missing instance count");

      RunId runId = RunIds.generate();

      BasicProcedureContextFactory contextFactory = createContextFactory(program, runId, instanceId, instanceCount,
View Full Code Here
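Two sources feed the instance bookkeeping above: the id of this particular instance arrives through the runtime arguments (defaulting to "0"), while the total count is declared in the specification itself. A minimal sketch; note that reusing the already-fetched procedureSpec avoids the duplicate getProcedures() lookup in the original:

  // Instance id of this runner, passed via ProgramOptions (defaults to "0").
  int instanceId = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCE_ID, "0"));
  // Total instances, declared in the ProcedureSpecification.
  int instanceCount = procedureSpec.getInstances();
  Preconditions.checkArgument(instanceCount > 0, "Invalid or missing instance count");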


  @Override
  public ProgramController run(Program program, ProgramOptions options) {
    // Extract and verify parameters
    ApplicationSpecification appSpec = program.getSpecification();
    Preconditions.checkNotNull(appSpec, "Missing application specification.");

    ProgramType processorType = program.getType();
    Preconditions.checkNotNull(processorType, "Missing processor type.");
    Preconditions.checkArgument(processorType == ProgramType.FLOW, "Only FLOW process type is supported.");

    FlowSpecification flowSpec = appSpec.getFlows().get(program.getName());
    Preconditions.checkNotNull(flowSpec, "Missing FlowSpecification for %s", program.getName());

    try {
      // Launch flowlet program runners
      RunId runId = RunIds.generate();
View Full Code Here

    // Initializing the dataset context and hooking it up with the Spark job transaction

    DatasetFramework datasetFramework = injector.getInstance(DatasetFramework.class);
    CConfiguration configuration = injector.getInstance(CConfiguration.class);

    ApplicationSpecification appSpec = program.getSpecification();

    //TODO: Change this when Spark starts supporting Metrics
    MetricsCollectionService metricsCollectionService = null;

    DiscoveryServiceClient discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);

    // Creating Spark job context
    SparkSpecification sparkSpec = appSpec.getSpark().get(program.getName());
    BasicSparkContext context =
      new BasicSparkContext(program, RunIds.fromString(runId), runtimeArguments, appSpec.getDatasets().keySet(),
                            sparkSpec, logicalStartTime, workflowBatch, metricsCollectionService,
                            datasetFramework, configuration, discoveryServiceClient);

    // Propagate the transaction to all TransactionAware datasets.
    // The transaction is committed or aborted by the ProgramRunner and DatasetRecordWriter, depending on whether the job succeeds.
View Full Code Here
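The closing comment refers to Tephra-style transaction propagation: every dataset participating in the job implements TransactionAware, and the started transaction is handed to each of them so their reads and writes belong to the Spark job's transaction. A sketch, assuming a collection of the job's TransactionAware datasets (the txAwareDatasets and tx variables are illustrative):

  // Hand the started transaction to each TransactionAware dataset so its
  // operations join the Spark job's transaction; the runner then commits or
  // aborts it depending on job success, as noted above.
  for (TransactionAware txAware : txAwareDatasets) {
    txAware.startTx(tx);
  }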


  @Override
  public ProgramController run(Program program, ProgramOptions options) {
    // Extract and verify parameters
    ApplicationSpecification appSpec = program.getSpecification();
    Preconditions.checkNotNull(appSpec, "Missing application specification.");

    ProgramType processorType = program.getType();
    Preconditions.checkNotNull(processorType, "Missing processor type.");
    Preconditions.checkArgument(processorType == ProgramType.MAPREDUCE, "Only MAPREDUCE process type is supported.");

    MapReduceSpecification spec = appSpec.getMapReduce().get(program.getName());
    Preconditions.checkNotNull(spec, "Missing MapReduceSpecification for %s", program.getName());

    // Optionally get the runId. If the MapReduce program was started by another program (e.g. a Workflow), it inherits that runId.
    Arguments arguments = options.getArguments();
    RunId runId = arguments.hasOption(ProgramOptionConstants.RUN_ID)
      ? RunIds.fromString(arguments.getOption(ProgramOptionConstants.RUN_ID))
      : RunIds.generate();
View Full Code Here


  @Override
  public ProgramController run(Program program, ProgramOptions options) {
    // Extract and verify options
    ApplicationSpecification appSpec = program.getSpecification();
    Preconditions.checkNotNull(appSpec, "Missing application specification.");

    ProgramType processorType = program.getType();
    Preconditions.checkNotNull(processorType, "Missing processor type.");
    Preconditions.checkArgument(processorType == ProgramType.WORKFLOW, "Only WORKFLOW process type is supported.");

    WorkflowSpecification workflowSpec = appSpec.getWorkflows().get(program.getName());
    Preconditions.checkNotNull(workflowSpec, "Missing WorkflowSpecification for %s", program.getName());

    RunId runId = RunIds.generate();
    WorkflowDriver driver = new WorkflowDriver(program, runId, options, hostname, workflowSpec, mapReduceProgramRunner);
View Full Code Here

      // These services need to be started before initializing the delegate, since they are used in
      // AbstractContext's constructor to create datasets.
      Futures.getUnchecked(
        Services.chainStart(zkClientService, kafkaClientService, metricsCollectionService, resourceReporter));

      ApplicationSpecification appSpec = program.getSpecification();
      String processorName = program.getName();
      runnableName = programOpts.getName();

      Arguments arguments = programOpts.getArguments();
      RunId runId = arguments.hasOption(ProgramOptionConstants.RUN_ID)
        ? RunIds.fromString(arguments.getOption(ProgramOptionConstants.RUN_ID))
        : RunIds.generate();

      ServiceSpecification serviceSpec = appSpec.getServices().get(processorName);
      final RuntimeSpecification runtimeSpec = serviceSpec.getRunnables().get(runnableName);
      String className = runtimeSpec.getRunnableSpecification().getClassName();
      LOG.info("Loading class: {}", className);
      Class<?> clz = Class.forName(className, true, program.getClassLoader());
      Preconditions.checkArgument(TwillRunnable.class.isAssignableFrom(clz), "%s is not a TwillRunnable.", clz);
View Full Code Here
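The load above deliberately uses Class.forName(className, true, program.getClassLoader()) rather than the system classloader, so the user's runnable resolves against the program's own artifact, and the isAssignableFrom check fails fast with a clear message rather than a ClassCastException later. A sketch of the step that typically follows (the reflective instantiation is illustrative, not taken from the excerpt):

  // Illustrative continuation: instantiate the verified runnable reflectively.
  // The cast is safe because the isAssignableFrom check above already passed.
  TwillRunnable runnable = (TwillRunnable) clz.newInstance();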
