Package org.apache.sqoop.job.etl

Examples of org.apache.sqoop.job.etl.Loader


        Configuration conf = null;
        if (!isTest) {
          conf = context.getConfiguration();
          loaderName = conf.get(JobConstants.JOB_ETL_LOADER);
        }
        Loader loader = (Loader) ClassUtils.instantiate(loaderName);

        // Objects that should be pass to the Executor execution
        PrefixContext subContext = null;
        Object configConnection = null;
        Object configJob = null;
        Schema schema = null;

        if (!isTest) {
          // Propagate connector schema in every case for now
          // TODO: Change to coditional choosing between HIO and Connector schema
          schema = ConfigurationUtils.getConnectorSchema(conf);

          switch (ConfigurationUtils.getJobType(conf)) {
            case EXPORT:
              subContext = new PrefixContext(conf, JobConstants.PREFIX_CONNECTOR_CONTEXT);
              configConnection = ConfigurationUtils.getConfigConnectorConnection(conf);
              configJob = ConfigurationUtils.getConfigConnectorJob(conf);
              break;
            case IMPORT:
              subContext = new PrefixContext(conf, "");
              configConnection = ConfigurationUtils.getConfigFrameworkConnection(conf);
              configJob = ConfigurationUtils.getConfigFrameworkJob(conf);
              break;
            default:
              throw new SqoopException(MapreduceExecutionError.MAPRED_EXEC_0023);
          }
        }

        // Create loader context
        LoaderContext loaderContext = new LoaderContext(subContext, reader, schema);

        LOG.info("Running loader class " + loaderName);
        loader.load(loaderContext, configConnection, configJob);
        LOG.info("Loader has finished");
      } catch (Throwable t) {
        readerFinished = true;
        LOG.error("Error while loading data out of MR job.", t);
        // Release so that the writer can tell the framework something went
View Full Code Here


    ExportJobConfiguration jobConfig = new ExportJobConfiguration();

    context.setString(GenericJdbcConnectorConstants.CONNECTOR_JDBC_DATA_SQL,
        "INSERT INTO " + executor.delimitIdentifier(tableName) + " VALUES (?,?,?)");

    Loader loader = new GenericJdbcExportLoader();
    DummyReader reader = new DummyReader();
    LoaderContext loaderContext = new LoaderContext(context, reader, null);
    loader.load(loaderContext, connectionConfig, jobConfig);

    int index = START;
    ResultSet rs = executor.executeQuery("SELECT * FROM "
        + executor.delimitIdentifier(tableName) + " ORDER BY ICOL");
    while (rs.next()) {
View Full Code Here

        GenericJdbcTestConstants.URL);
    context.setString(GenericJdbcConnectorConstants.CONNECTOR_JDBC_DATA_SQL,
        "INSERT INTO " + executor.delimitIdentifier(tableName)
            + " VALUES (?,?,?)");

    Loader loader = new GenericJdbcExportLoader();
    DummyReader reader = new DummyReader();

    loader.load(context, null, null, reader);

    int index = START;
    ResultSet rs = executor.executeQuery("SELECT * FROM "
        + executor.delimitIdentifier(tableName) + " ORDER BY ICOL");
    while (rs.next()) {
View Full Code Here

        conf = context.getConfiguration();


        loaderName = conf.get(JobConstants.JOB_ETL_LOADER);
      }
      Loader loader = (Loader) ClassUtils.instantiate(loaderName);

      // Objects that should be pass to the Executor execution
      PrefixContext subContext = null;
      Object configConnection = null;
      Object configJob = null;

      if (!isTest) {
        switch (ConfigurationUtils.getJobType(conf)) {
          case EXPORT:
            subContext = new PrefixContext(conf, JobConstants.PREFIX_CONNECTOR_CONTEXT);
            configConnection = ConfigurationUtils.getConnectorConnection(conf);
            configJob = ConfigurationUtils.getConnectorJob(conf);
            break;
          case IMPORT:
            subContext = new PrefixContext(conf, "");
            configConnection = ConfigurationUtils.getFrameworkConnection(conf);
            configJob = ConfigurationUtils.getFrameworkJob(conf);
            break;
          default:
            readerFinished = true;
            // Release so that the writer can tell the framework something went
            // wrong.
            free.release();
            throw new SqoopException(MapreduceExecutionError.MAPRED_EXEC_0023);
        }
      }

      try {
        LOG.info("Running loader class " + loaderName);
        loader.load(subContext, configConnection, configJob, reader);
        LOG.info("Loader has finished");
      } catch (Throwable t) {
        readerFinished = true;
        LOG.error("Error while loading data out of MR job.", t);
        // Release so that the writer can tell the framework something went
View Full Code Here

    ExportJobConfiguration jobConfig = new ExportJobConfiguration();

    context.setString(GenericJdbcConnectorConstants.CONNECTOR_JDBC_DATA_SQL,
        "INSERT INTO " + executor.delimitIdentifier(tableName) + " VALUES (?,?,?)");

    Loader loader = new GenericJdbcExportLoader();
    DummyReader reader = new DummyReader();
    LoaderContext loaderContext = new LoaderContext(context, reader);
    loader.load(loaderContext, connectionConfig, jobConfig);

    int index = START;
    ResultSet rs = executor.executeQuery("SELECT * FROM "
        + executor.delimitIdentifier(tableName) + " ORDER BY ICOL");
    while (rs.next()) {
View Full Code Here

        Configuration conf = null;
        if (!isTest) {
          conf = context.getConfiguration();
          loaderName = conf.get(JobConstants.JOB_ETL_LOADER);
        }
        Loader loader = (Loader) ClassUtils.instantiate(loaderName);

        // Objects that should be pass to the Executor execution
        PrefixContext subContext = null;
        Object configConnection = null;
        Object configJob = null;

        if (!isTest) {
          switch (ConfigurationUtils.getJobType(conf)) {
            case EXPORT:
              subContext = new PrefixContext(conf, JobConstants.PREFIX_CONNECTOR_CONTEXT);
              configConnection = ConfigurationUtils.getConnectorConnection(conf);
              configJob = ConfigurationUtils.getConnectorJob(conf);
              break;
            case IMPORT:
              subContext = new PrefixContext(conf, "");
              configConnection = ConfigurationUtils.getFrameworkConnection(conf);
              configJob = ConfigurationUtils.getFrameworkJob(conf);
              break;
            default:
              throw new SqoopException(MapreduceExecutionError.MAPRED_EXEC_0023);
          }
        }

        // Create loader context
        LoaderContext loaderContext = new LoaderContext(subContext, reader);

        LOG.info("Running loader class " + loaderName);
        loader.load(loaderContext, configConnection, configJob);
        LOG.info("Loader has finished");
      } catch (Throwable t) {
        readerFinished = true;
        LOG.error("Error while loading data out of MR job.", t);
        // Release so that the writer can tell the framework something went
View Full Code Here

TOP

Related Classes of org.apache.sqoop.job.etl.Loader

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.