Package co.cask.cdap.api.spark

Examples of co.cask.cdap.api.spark.SparkSpecification$Builder$AfterDescription


                                   Map<String, String> properties) {
    this(null, name, description, mainClassName, properties);
  }

  public DefaultSparkSpecification(Spark spark) {
    SparkSpecification configureSpec = spark.configure();

    Map<String, String> properties = Maps.newHashMap(configureSpec.getProperties());

    Reflections.visit(spark, TypeToken.of(spark.getClass()),
                      new PropertyFieldExtractor(properties));

    this.className = spark.getClass().getName();
    this.name = configureSpec.getName();
    this.description = configureSpec.getDescription();
    this.mainClassName = configureSpec.getMainClassName();
    this.properties = ImmutableMap.copyOf(properties);
  }
View Full Code Here


    ProgramType processorType = program.getType();
    Preconditions.checkNotNull(processorType, "Missing processor type.");
    Preconditions.checkArgument(processorType == ProgramType.SPARK, "Only Spark process type is supported.");

    final SparkSpecification spec = appSpec.getSpark().get(program.getName());
    Preconditions.checkNotNull(spec, "Missing SparkSpecification for %s", program.getName());

    // Optionally get the runId. If the Spark program was started by another program (e.g. a Workflow), it inherits that runId.
    Arguments arguments = options.getArguments();
    RunId runId = arguments.hasOption(ProgramOptionConstants.RUN_ID) ? RunIds.fromString(arguments.getOption
      (ProgramOptionConstants.RUN_ID)) : RunIds.generate();

    long logicalStartTime = arguments.hasOption(ProgramOptionConstants.LOGICAL_START_TIME)
      ? Long.parseLong(arguments.getOption(ProgramOptionConstants.LOGICAL_START_TIME)) : System.currentTimeMillis();

    String workflowBatch = arguments.getOption(ProgramOptionConstants.WORKFLOW_BATCH);

    Spark spark;
    try {
      spark = new InstantiatorFactory(false).get(TypeToken.of(program.<Spark>getMainClass())).create();
    } catch (Exception e) {
      LOG.error("Failed to instantiate Spark class for {}", spec.getClassName(), e);
      throw Throwables.propagate(e);
    }

    final BasicSparkContext context = new BasicSparkContext(program, runId, options.getUserArguments(),
                                                            program.getSpecification().getDatasets().keySet(), spec,
View Full Code Here

    MetricsCollectionService metricsCollectionService = null;

    DiscoveryServiceClient discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);

    // Creating Spark job context
    SparkSpecification sparkSpec = program.getSpecification().getSpark().get(program.getName());
    BasicSparkContext context =
      new BasicSparkContext(program, RunIds.fromString(runId), runtimeArguments, appSpec.getDatasets().keySet(),
                            sparkSpec, logicalStartTime, workflowBatch, metricsCollectionService,
                            datasetFramework, configuration, discoveryServiceClient);
View Full Code Here

TOP

Related Classes of co.cask.cdap.api.spark.SparkSpecification$Builder$AfterDescription

Copyright © 2018 www.massapicom. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.