Package org.apache.twill.internal

Examples of org.apache.twill.internal.ApplicationBundler
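ApplicationBundler builds a bundle jar by tracing the class dependencies of one or more starting classes and packaging everything it finds, optionally together with extra resources. Its constructor takes a list of package prefixes to leave out of the bundle (typically classes the target environment already provides); an optional second list names sub-packages that should always be bundled even when they fall under an excluded prefix. A minimal sketch of the basic pattern the snippets below share, assuming a local temp directory and using the example class itself as the trace root:

  import java.io.File;

  import com.google.common.collect.ImmutableList;
  import org.apache.twill.filesystem.LocalLocationFactory;
  import org.apache.twill.filesystem.Location;
  import org.apache.twill.internal.ApplicationBundler;

  public class BundleExample {
    public static void main(String[] args) throws Exception {
      // Write the bundle into a local directory; any other LocationFactory works the same way.
      Location jar = new LocalLocationFactory(new File("/tmp")).create("bundle.jar");

      // An empty exclude list: every transitively referenced class is traced and packaged.
      ApplicationBundler bundler = new ApplicationBundler(ImmutableList.<String>of());

      // BundleExample.class is a stand-in; any loadable class can be the trace root.
      bundler.createBundle(jar, BundleExample.class);
    }
  }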


    }
    return credentials;
  }

  private ApplicationBundler createBundler() {
    return new ApplicationBundler(ImmutableList.<String>of());
  }
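An empty exclude list, as in createBundler() above, filters nothing out of the dependency trace, so the resulting bundle is fully self-contained. The varargs form of createBundle also accepts several trace roots at once; a hedged one-liner (ExampleRunner and ExampleHelper are hypothetical classes):

  createBundler().createBundle(location, ExampleRunner.class, ExampleHelper.class);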


  }

  private static File createDeploymentJar(LocationFactory locationFactory, Class<?> clz, File... bundleEmbeddedJars)
    throws IOException, InstantiationException, IllegalAccessException {

    ApplicationBundler bundler = new ApplicationBundler(ImmutableList.of("co.cask.tigon.api",
                                                                         "org.apache.hadoop",
                                                                         "org.apache.hbase"));
    Location jarLocation = locationFactory.create(clz.getName()).getTempFile(".jar");
    bundler.createBundle(jarLocation, clz);

    Location deployJar = locationFactory.create(clz.getName()).getTempFile(".jar");

    Flow flow = (Flow) clz.newInstance();
    FlowSpecification flowSpec = new DefaultFlowSpecification(clz.getName(), flow.configure());
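Here the exclude list names co.cask.tigon.api, org.apache.hadoop and org.apache.hbase because those packages are expected to be on the cluster classpath already; leaving them out keeps the deployment jar small. The fragment then instantiates the Flow reflectively to obtain its FlowSpecification. A hedged sketch of how such a helper is typically driven from a test (TestFlow and tmpFolder are hypothetical):

  LocationFactory locationFactory = new LocalLocationFactory(tmpFolder.newFolder());
  File deployJar = createDeploymentJar(locationFactory, TestFlow.class);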

  }

  private static File createDeploymentJar(LocationFactory locationFactory, Class<?> clz, File... bundleEmbeddedJars)
    throws IOException {

    ApplicationBundler bundler = new ApplicationBundler(ImmutableList.of("co.cask.cdap.api",
                                                                         "org.apache.hadoop",
                                                                         "org.apache.hbase",
                                                                         "org.apache.hive"));
    Location jarLocation = locationFactory.create(clz.getName()).getTempFile(".jar");
    bundler.createBundle(jarLocation, clz);

    Location deployJar = locationFactory.create(clz.getName()).getTempFile(".jar");

    // Creates the manifest
    Manifest manifest = new Manifest();
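The snippet cuts off right after the Manifest is created. A plausible continuation using only the standard java.util.jar API (the attribute values are assumptions, not taken from the source):

  manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
  manifest.getMainAttributes().put(Attributes.Name.MAIN_CLASS, clz.getName());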

   * @throws IOException if it fails to package the jar through
   *                     {@link ApplicationBundler#createBundle(Location, Iterable, Iterable)}
   */
  private Location buildDependencyJar(BasicSparkContext context, Configuration conf)
    throws IOException {
    ApplicationBundler appBundler = new ApplicationBundler(Lists.newArrayList("org.apache.hadoop", "org.apache.spark"),
                                                           Lists.newArrayList("org.apache.hadoop.hbase",
                                                                              "org.apache.hadoop.hive"));
    Id.Program programId = context.getProgram().getId();

    Location appFabricDependenciesJarLocation =
      locationFactory.create(String.format("%s.%s.%s.%s.%s_temp.jar",
                                           ProgramType.SPARK.name().toLowerCase(), programId.getAccountId(),
                                           programId.getApplicationId(), programId.getId(),
                                           context.getRunId().getId()));

    LOG.debug("Creating Spark Job Dependency jar: {}", appFabricDependenciesJarLocation.toURI());

    URI hConfLocation = writeHConf(context, conf);
    try {
      Set<Class<?>> classes = Sets.newHashSet();
      Set<URI> resources = Sets.newHashSet();

      classes.add(Spark.class);
      classes.add(SparkDatasetInputFormat.class);
      classes.add(SparkDatasetOutputFormat.class);
      classes.add(SparkProgramWrapper.class);
      classes.add(JavaSparkContext.class);
      classes.add(ScalaSparkContext.class);

      // We have to add this Hadoop Configuration to the dependency jar so that when the Spark job runs outside
      // CDAP it can create the BasicSparkContext and have access to our datasets, transactions, etc.
      resources.add(hConfLocation);

      try {
        Class<?> hbaseTableUtilClass = new HBaseTableUtilFactory().get().getClass();
        classes.add(hbaseTableUtilClass);
      } catch (ProvisionException e) {
        LOG.warn("Not including HBaseTableUtil classes in submitted Job Jar since they are not available");
      }


      ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(conf.getClassLoader());
      try {
        appBundler.createBundle(appFabricDependenciesJarLocation, classes, resources);
      } finally {
        Thread.currentThread().setContextClassLoader(oldClassLoader);
      }
    } catch (Exception e) {
      throw Throwables.propagate(e);
    } finally {
      deleteHConfDir(hConfLocation);
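This example uses the two-list constructor: the first list holds package prefixes excluded from the bundle (Hadoop and Spark are provided by the cluster), while the second list re-includes sub-packages that would otherwise match an excluded prefix; org.apache.hadoop.hbase and org.apache.hadoop.hive are pulled back in because a stock Hadoop distribution does not ship them. It also uses the three-argument createBundle(Location, Iterable, Iterable) overload so that the serialized Hadoop Configuration travels inside the jar as a plain resource.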

    Location tempBundle = destination.getTempFile(".jar");
    try {
      ClassLoader remembered = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(clz.getClassLoader());
      try {
        ApplicationBundler bundler = new ApplicationBundler(ImmutableList.of("co.cask.cdap.api",
                                                                             "org.apache.hadoop",
                                                                             "org.apache.hbase",
                                                                             "org.apache.hive"));
        bundler.createBundle(tempBundle, clz);
      } finally {
        Thread.currentThread().setContextClassLoader(remembered);
      }

      // Create the program jar for deployment. It removes the "classes/" prefix as that's the convention taken
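The snippets swap the thread context classloader before calling createBundle so that dependency tracing sees the program's classes, and the try/finally guarantees the original loader is restored even if bundling fails. The swap can be factored into a reusable helper; a hedged sketch (withContextClassLoader is hypothetical, not part of Twill):

  static <T> T withContextClassLoader(ClassLoader cl, java.util.concurrent.Callable<T> action) throws Exception {
    Thread current = Thread.currentThread();
    ClassLoader old = current.getContextClassLoader();
    current.setContextClassLoader(cl);
    try {
      return action.call();
    } finally {
      current.setContextClassLoader(old);
    }
  }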

  @Test
  public void testFindDependencies() throws IOException, ClassNotFoundException {
    Location location = new LocalLocationFactory(tmpDir.newFolder()).create("test.jar");

    // Create a jar file by tracing dependencies
    ApplicationBundler bundler = new ApplicationBundler(ImmutableList.<String>of());
    bundler.createBundle(location, ApplicationBundler.class);

    File targetDir = tmpDir.newFolder();
    unjar(new File(location.toURI()), targetDir);

    // Load the class back; it should be loaded by the custom classloader
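The unjar helper is not shown in the snippet. A possible implementation using only java.util.jar and java.nio.file (a hypothetical reconstruction, not the test's actual helper):

  private static void unjar(File jarFile, File targetDir) throws IOException {
    try (JarInputStream jis = new JarInputStream(new FileInputStream(jarFile))) {
      JarEntry entry;
      while ((entry = jis.getNextJarEntry()) != null) {
        File out = new File(targetDir, entry.getName());
        if (entry.isDirectory()) {
          out.mkdirs();
          continue;
        }
        // Path-traversal checks omitted for brevity.
        out.getParentFile().mkdirs();
        Files.copy(jis, out.toPath(), StandardCopyOption.REPLACE_EXISTING);
      }
    }
  }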


   * Creates a jar that contains everything that is needed to run the MapReduce program in Hadoop.
   *
   * @return a new {@link Location} containing the job jar
   */
  private Location buildJobJar(BasicMapReduceContext context) throws IOException {
    ApplicationBundler appBundler = new ApplicationBundler(ImmutableList.of("org.apache.hadoop"),
                                                           ImmutableList.of("org.apache.hadoop.hbase",
                                                                            "org.apache.hadoop.hive"));
    Id.Program programId = context.getProgram().getId();

    Location jobJar =
      locationFactory.create(String.format("%s.%s.%s.%s.%s.jar",
                                           ProgramType.MAPREDUCE.name().toLowerCase(),
                                           programId.getAccountId(), programId.getApplicationId(),
                                           programId.getId(), context.getRunId().getId()));

    LOG.debug("Creating Job jar: {}", jobJar.toURI());

    Set<Class<?>> classes = Sets.newHashSet();
    classes.add(MapReduce.class);
    classes.add(DataSetOutputFormat.class);
    classes.add(DataSetInputFormat.class);
    classes.add(TextStreamInputFormat.class);
    classes.add(MapperWrapper.class);
    classes.add(ReducerWrapper.class);

    Job jobConf = context.getHadoopJob();
    try {
      Class<? extends InputFormat<?, ?>> inputFormatClass = jobConf.getInputFormatClass();
      LOG.info("InputFormat class: {} {}", inputFormatClass, inputFormatClass.getClassLoader());
      classes.add(inputFormatClass);
    } catch (Throwable t) {
      LOG.info("InputFormat class not found: {}", t.getMessage(), t);
      // Ignore
    }
    try {
      Class<? extends OutputFormat<?, ?>> outputFormatClass = jobConf.getOutputFormatClass();
      LOG.info("OutputFormat class: {} {}", outputFormatClass, outputFormatClass.getClassLoader());
      classes.add(outputFormatClass);
    } catch (Throwable t) {
      LOG.info("OutputFormat class not found: {}", t.getMessage(), t);
      // Ignore
    }

    try {
      Class<?> hbaseTableUtilClass = new HBaseTableUtilFactory().get().getClass();
      classes.add(hbaseTableUtilClass);
    } catch (ProvisionException e) {
      LOG.warn("Not including HBaseTableUtil classes in submitted Job Jar since they are not available");
    }

    ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(jobConf.getConfiguration().getClassLoader());
    try {
      appBundler.createBundle(jobJar, classes);
    } finally {
      Thread.currentThread().setContextClassLoader(oldClassLoader);
    }

    LOG.info("Built MapReduce Job Jar at {}", jobJar.toURI());
    return jobJar;
  }
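Unlike the varargs calls earlier on the page, buildJobJar hands a whole set of trace roots to the Iterable overload of createBundle. The input and output format lookups are wrapped in catch (Throwable) because Job.getInputFormatClass() and Job.getOutputFormatClass() throw ClassNotFoundException when the configured class is not yet loadable; in that case the format class is simply not bundled.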
