Example usages of org.apache.sqoop.execution.mapreduce.MRSubmissionRequest


Examples of org.apache.sqoop.execution.mapreduce.MRSubmissionRequest

   * {@inheritDoc}
   */
  @Override
  public boolean submit(SubmissionRequest generalRequest) {
    // We're supporting only map reduce jobs
    MRSubmissionRequest request = (MRSubmissionRequest) generalRequest;

    // Clone global configuration
    Configuration configuration = new Configuration(globalConfiguration);

    // Serialize job type as it will be needed by underlying execution engine
    ConfigurationUtils.setJobType(configuration, request.getJobType());

    // Serialize framework context into job configuration
    for(Map.Entry<String, String> entry: request.getFrameworkContext()) {
      if (entry.getValue() == null) {
        LOG.warn("Ignoring null framework context value for key " + entry.getKey());
        continue;
      }
      configuration.set(entry.getKey(), entry.getValue());
    }

    // Serialize connector context as a sub namespace
    for(Map.Entry<String, String> entry :request.getConnectorContext()) {
      if (entry.getValue() == null) {
        LOG.warn("Ignoring null connector context value for key " + entry.getKey());
        continue;
      }
      configuration.set(
        JobConstants.PREFIX_CONNECTOR_CONTEXT + entry.getKey(),
        entry.getValue());
    }

    // Set up notification URL if it's available
    if(request.getNotificationUrl() != null) {
      configuration.set("job.end.notification.url", request.getNotificationUrl());
    }

    // Turn off speculative execution
    configuration.setBoolean("mapred.map.tasks.speculative.execution", false);
    configuration.setBoolean("mapred.reduce.tasks.speculative.execution", false);

    // Promote all required jars to the job
    StringBuilder sb = new StringBuilder();
    boolean first = true;
    for(String jar : request.getJars()) {
      if(first) {
        first = false;
      } else {
        sb.append(",");
      }
      LOG.debug("Adding jar to the job: " + jar);
      sb.append(jar);
    }
    configuration.set("tmpjars", sb.toString());

    try {
      Job job = new Job(configuration);

      // And finally put all configuration objects to credentials cache
      ConfigurationUtils.setConfigConnectorConnection(job, request.getConfigConnectorConnection());
      ConfigurationUtils.setConfigConnectorJob(job, request.getConfigConnectorJob());
      ConfigurationUtils.setConfigFrameworkConnection(job, request.getConfigFrameworkConnection());
      ConfigurationUtils.setConfigFrameworkJob(job, request.getConfigFrameworkJob());

      if(request.getJobName() != null) {
        job.setJobName("Sqoop: " + request.getJobName());
      } else {
        job.setJobName("Sqoop job with id: " + request.getJobId());
      }

      job.setInputFormatClass(request.getInputFormatClass());

      job.setMapperClass(request.getMapperClass());
      job.setMapOutputKeyClass(request.getMapOutputKeyClass());
      job.setMapOutputValueClass(request.getMapOutputValueClass());

      String outputDirectory = request.getOutputDirectory();
      if(outputDirectory != null) {
        FileOutputFormat.setOutputPath(job, new Path(outputDirectory));
      }

      // Set number of reducers as number of configured loaders  or suppress
      // reduce phase entirely if loaders are not set at all.
      if(request.getLoaders() != null) {
        job.setNumReduceTasks(request.getLoaders());
      } else {
        job.setNumReduceTasks(0);
      }

      job.setOutputFormatClass(request.getOutputFormatClass());
      job.setOutputKeyClass(request.getOutputKeyClass());
      job.setOutputValueClass(request.getOutputValueClass());

      // If we're in local mode than wait on completion. Local job runner do not
      // seems to be exposing API to get previously submitted job which makes
      // other methods of the submission engine quite useless.
      if(isLocal()) {
        job.waitForCompletion(true);
      } else {
        job.submit();
      }

      String jobId = job.getJobID().toString();
      request.getSummary().setExternalId(jobId);
      request.getSummary().setExternalLink(job.getTrackingURL());

      LOG.debug("Executed new map-reduce job with id " + jobId);
    } catch (Exception e) {
      request.getSummary().setException(e);
      LOG.error("Error in submitting job", e);
      return false;
    }
    return true;
  }
View Full Code Here

Examples of org.apache.sqoop.execution.mapreduce.MRSubmissionRequest

   * {@inheritDoc}
   */
  @Override
  public boolean submit(SubmissionRequest generalRequest) {
    // We're supporting only map reduce jobs
    MRSubmissionRequest request = (MRSubmissionRequest) generalRequest;

    // Clone global configuration
    Configuration configuration = new Configuration(globalConfiguration);

    // Serialize job type as it will be needed by underlying execution engine
    configuration.set(JobConstants.JOB_TYPE, request.getJobType().name());

    // Serialize framework context into job configuration
    for(Map.Entry<String, String> entry: request.getFrameworkContext()) {
      if (entry.getValue() == null) {
        LOG.warn("Ignoring null framework context value for key " + entry.getKey());
        continue;
      }
      configuration.set(entry.getKey(), entry.getValue());
    }

    // Serialize connector context as a sub namespace
    for(Map.Entry<String, String> entry :request.getConnectorContext()) {
      if (entry.getValue() == null) {
        LOG.warn("Ignoring null connector context value for key " + entry.getKey());
        continue;
      }
      configuration.set(
        JobConstants.PREFIX_CONNECTOR_CONTEXT + entry.getKey(),
        entry.getValue());
    }

    // Serialize configuration objects - Firstly configuration classes
    configuration.set(JobConstants.JOB_CONFIG_CLASS_CONNECTOR_CONNECTION,
      request.getConfigConnectorConnection().getClass().getName());
    configuration.set(JobConstants.JOB_CONFIG_CLASS_CONNECTOR_JOB,
      request.getConfigConnectorJob().getClass().getName());
    configuration.set(JobConstants.JOB_CONFIG_CLASS_FRAMEWORK_CONNECTION,
      request.getConfigFrameworkConnection().getClass().getName());
    configuration.set(JobConstants.JOB_CONFIG_CLASS_FRAMEWORK_JOB,
      request.getConfigFrameworkJob().getClass().getName());

    // And finally configuration data
    configuration.set(JobConstants.JOB_CONFIG_CONNECTOR_CONNECTION,
      FormUtils.toJson(request.getConfigConnectorConnection()));
    configuration.set(JobConstants.JOB_CONFIG_CONNECTOR_JOB,
      FormUtils.toJson(request.getConfigConnectorJob()));
    configuration.set(JobConstants.JOB_CONFIG_FRAMEWORK_CONNECTION,
      FormUtils.toJson(request.getConfigFrameworkConnection()));
    configuration.set(JobConstants.JOB_CONFIG_FRAMEWORK_JOB,
      FormUtils.toJson(request.getConfigFrameworkConnection()));

    // Set up notification URL if it's available
    if(request.getNotificationUrl() != null) {
      configuration.set("job.end.notification.url", request.getNotificationUrl());
    }

    // Turn off speculative execution
    configuration.setBoolean("mapred.map.tasks.speculative.execution", false);
    configuration.setBoolean("mapred.reduce.tasks.speculative.execution", false);

    // Promote all required jars to the job
    StringBuilder sb = new StringBuilder();
    boolean first = true;
    for(String jar : request.getJars()) {
      if(first) {
        first = false;
      } else {
        sb.append(",");
      }
      LOG.debug("Adding jar to the job: " + jar);
      sb.append(jar);
    }
    configuration.set("tmpjars", sb.toString());

    try {
      Job job = new Job(configuration);

      if(request.getJobName() != null) {
        job.setJobName("Sqoop: " + request.getJobName());
      } else {
        job.setJobName("Sqoop job with id: " + request.getJobId());
      }

      job.setInputFormatClass(request.getInputFormatClass());

      job.setMapperClass(request.getMapperClass());
      job.setMapOutputKeyClass(request.getMapOutputKeyClass());
      job.setMapOutputValueClass(request.getMapOutputValueClass());

      String outputDirectory = request.getOutputDirectory();
      if(outputDirectory != null) {
        FileOutputFormat.setOutputPath(job, new Path(outputDirectory));
      }

      // Set number of reducers as number of configured loaders  or suppress
      // reduce phase entirely if loaders are not set at all.
      if(request.getLoaders() != null) {
        job.setNumReduceTasks(request.getLoaders());
      } else {
        job.setNumReduceTasks(0);
      }

      job.setOutputFormatClass(request.getOutputFormatClass());
      job.setOutputKeyClass(request.getOutputKeyClass());
      job.setOutputValueClass(request.getOutputValueClass());

      job.submit();

      String jobId = job.getJobID().toString();
      request.getSummary().setExternalId(jobId);
      request.getSummary().setExternalLink(job.getTrackingURL());

      LOG.debug("Executed new map-reduce job with id " + jobId);
    } catch (Exception e) {
      request.getSummary().setException(e);
      LOG.error("Error in submitting job", e);
      return false;
    }
    return true;
  }
View Full Code Here

Examples of org.apache.sqoop.execution.mapreduce.MRSubmissionRequest

   * {@inheritDoc}
   */
  @Override
  public boolean submit(SubmissionRequest generalRequest) {
    // We're supporting only map reduce jobs
    MRSubmissionRequest request = (MRSubmissionRequest) generalRequest;

    // Clone global configuration
    Configuration configuration = new Configuration(globalConfiguration);

    // Serialize job type as it will be needed by underlying execution engine
    configuration.set(JobConstants.JOB_TYPE, request.getJobType().name());

    // Serialize framework context into job configuration
    for(Map.Entry<String, String> entry: request.getFrameworkContext()) {
      if (entry.getValue() == null) {
        LOG.warn("Ignoring null framework context value for key " + entry.getKey());
        continue;
      }
      configuration.set(entry.getKey(), entry.getValue());
    }

    // Serialize connector context as a sub namespace
    for(Map.Entry<String, String> entry :request.getConnectorContext()) {
      if (entry.getValue() == null) {
        LOG.warn("Ignoring null connector context value for key " + entry.getKey());
        continue;
      }
      configuration.set(
        JobConstants.PREFIX_CONNECTOR_CONTEXT + entry.getKey(),
        entry.getValue());
    }

    // Serialize configuration objects - Firstly configuration classes
    configuration.set(JobConstants.JOB_CONFIG_CLASS_CONNECTOR_CONNECTION,
      request.getConfigConnectorConnection().getClass().getName());
    configuration.set(JobConstants.JOB_CONFIG_CLASS_CONNECTOR_JOB,
      request.getConfigConnectorJob().getClass().getName());
    configuration.set(JobConstants.JOB_CONFIG_CLASS_FRAMEWORK_CONNECTION,
      request.getConfigFrameworkConnection().getClass().getName());
    configuration.set(JobConstants.JOB_CONFIG_CLASS_FRAMEWORK_JOB,
      request.getConfigFrameworkJob().getClass().getName());

    // Set up notification URL if it's available
    if(request.getNotificationUrl() != null) {
      configuration.set("job.end.notification.url", request.getNotificationUrl());
    }

    // Turn off speculative execution
    configuration.setBoolean("mapred.map.tasks.speculative.execution", false);
    configuration.setBoolean("mapred.reduce.tasks.speculative.execution", false);

    // Promote all required jars to the job
    StringBuilder sb = new StringBuilder();
    boolean first = true;
    for(String jar : request.getJars()) {
      if(first) {
        first = false;
      } else {
        sb.append(",");
      }
      LOG.debug("Adding jar to the job: " + jar);
      sb.append(jar);
    }
    configuration.set("tmpjars", sb.toString());

    try {
      Job job = new Job(configuration);

      // And finally put all configuration objects to credentials cache
      Credentials credentials = job.getCredentials();
      credentials.addSecretKey(JobConstants.JOB_CONFIG_CONNECTOR_CONNECTION_KEY,
        FormUtils.toJson(request.getConfigConnectorConnection()).getBytes());
      credentials.addSecretKey(JobConstants.JOB_CONFIG_CONNECTOR_JOB_KEY,
        FormUtils.toJson(request.getConfigConnectorJob()).getBytes());
      credentials.addSecretKey(JobConstants.JOB_CONFIG_FRAMEWORK_CONNECTION_KEY,
        FormUtils.toJson(request.getConfigFrameworkConnection()).getBytes());
      credentials.addSecretKey(JobConstants.JOB_CONFIG_FRAMEWORK_JOB_KEY,
        FormUtils.toJson(request.getConfigFrameworkConnection()).getBytes());

      if(request.getJobName() != null) {
        job.setJobName("Sqoop: " + request.getJobName());
      } else {
        job.setJobName("Sqoop job with id: " + request.getJobId());
      }

      job.setInputFormatClass(request.getInputFormatClass());

      job.setMapperClass(request.getMapperClass());
      job.setMapOutputKeyClass(request.getMapOutputKeyClass());
      job.setMapOutputValueClass(request.getMapOutputValueClass());

      String outputDirectory = request.getOutputDirectory();
      if(outputDirectory != null) {
        FileOutputFormat.setOutputPath(job, new Path(outputDirectory));
      }

      // Set number of reducers as number of configured loaders  or suppress
      // reduce phase entirely if loaders are not set at all.
      if(request.getLoaders() != null) {
        job.setNumReduceTasks(request.getLoaders());
      } else {
        job.setNumReduceTasks(0);
      }

      job.setOutputFormatClass(request.getOutputFormatClass());
      job.setOutputKeyClass(request.getOutputKeyClass());
      job.setOutputValueClass(request.getOutputValueClass());

      // If we're in local mode than wait on completion. Local job runner do not
      // seems to be exposing API to get previously submitted job which makes
      // other methods of the submission engine quite useless.
      if(isLocal()) {
        job.waitForCompletion(true);
      } else {
        job.submit();
      }

      String jobId = job.getJobID().toString();
      request.getSummary().setExternalId(jobId);
      request.getSummary().setExternalLink(job.getTrackingURL());

      LOG.debug("Executed new map-reduce job with id " + jobId);
    } catch (Exception e) {
      request.getSummary().setException(e);
      LOG.error("Error in submitting job", e);
      return false;
    }
    return true;
  }
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.