Package com.cloudera.sqoop.mapreduce

Examples of com.cloudera.sqoop.mapreduce.ImportJobBase

The snippets below show how ConnManager implementations use ImportJobBase: each one inspects the SqoopOptions, picks the ImportJobBase subclass that matches the target store (HDFS, HBase, or Accumulo), and launches the MapReduce job with runImport().

    // Table-import path, as in Sqoop's SqlManager.importTable(); the method
    // signature and the tableName declaration are restored here so the
    // excerpt reads on its own.
    public void importTable(ImportJobContext context)
        throws IOException, ImportException {
      String tableName = context.getTableName();
      String jarFile = context.getJarFile();
      SqoopOptions opts = context.getOptions();

      // Record this manager on the context so the job can reach it.
      context.setConnManager(this);

      // Pick the ImportJobBase subclass for the target data store.
      ImportJobBase importer;
      if (opts.getHBaseTable() != null) {
        // Import to HBase.
        if (!HBaseUtil.isHBaseJarPresent()) {
          throw new ImportException("HBase jars are not present in "
              + "classpath, cannot import to HBase!");
        }
        importer = new HBaseImportJob(opts, context);
      } else {
        // Import to HDFS.
        importer = new DataDrivenImportJob(opts, context.getInputFormat(),
            context);
      }

      checkTableImportOptions(context);

      String splitCol = getSplitColumn(opts, tableName);
      importer.runImport(tableName, jarFile, splitCol, opts.getConf());
    }
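
For orientation, here is a minimal, hypothetical driver showing how a caller reaches importTable() above. The wiring uses Sqoop's public classes (ConnFactory, JobData, and the ImportJobContext(table, jarFile, options, destination) constructor), but the connect string, table name, and the null jar/destination arguments are illustrative assumptions, not verbatim Sqoop bootstrap code.

    import org.apache.hadoop.conf.Configuration;
    import com.cloudera.sqoop.ConnFactory;
    import com.cloudera.sqoop.SqoopOptions;
    import com.cloudera.sqoop.manager.ConnManager;
    import com.cloudera.sqoop.manager.ImportJobContext;
    import com.cloudera.sqoop.metastore.JobData;
    import com.cloudera.sqoop.tool.ImportTool;

    /** Hypothetical driver; values are placeholders, not Sqoop defaults. */
    public class ImportTableSketch {
      public static void main(String[] args) throws Exception {
        SqoopOptions opts = new SqoopOptions();
        opts.setConnectString("jdbc:mysql://db.example.com/corp"); // assumed URL
        opts.setTableName("employees");                            // assumed table
        opts.setNumMappers(4);

        // Resolve the ConnManager for the JDBC URL, as the sqoop CLI would.
        ConnManager manager = new ConnFactory(new Configuration())
            .getManager(new JobData(opts, new ImportTool()));

        // In a real run the codegen step supplies the jar; null is a stand-in.
        ImportJobContext context = new ImportJobContext(
            opts.getTableName(), null, opts, null);
        manager.importTable(context); // dispatches to an ImportJobBase subclass
      }
    }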


    // Free-form query path, as in Sqoop's SqlManager.importQuery(); the
    // signature is restored for context. There is no table, so runImport()
    // receives null and the query is carried in the options.
    public void importQuery(ImportJobContext context)
        throws IOException, ImportException {
      String jarFile = context.getJarFile();
      SqoopOptions opts = context.getOptions();

      context.setConnManager(this);

      ImportJobBase importer;
      if (opts.getHBaseTable() != null) {
        // Import to HBase.
        if (!HBaseUtil.isHBaseJarPresent()) {
          throw new ImportException("HBase jars are not present in classpath,"
              + " cannot import to HBase!");
        }
        importer = new HBaseImportJob(opts, context);
      } else {
        // Import to HDFS.
        importer = new DataDrivenImportJob(opts, context.getInputFormat(),
            context);
      }

      // A free-form query has no table metadata, so a usable split column
      // can only come from --split-by.
      String splitCol = getSplitColumn(opts, null);
      if (splitCol == null) {
        String boundaryQuery = opts.getBoundaryQuery();
        if (opts.getNumMappers() > 1) {
          // Can't infer a primary key.
          throw new ImportException("A split-by column must be specified for "
              + "parallel free-form query imports. Please specify one with "
              + "--split-by or perform a sequential import with '-m 1'.");
        } else if (boundaryQuery != null && !boundaryQuery.isEmpty()) {
          // A boundary query is only meaningful with an explicit split column.
          throw new ImportException("Using a boundary query for a query based "
              + "import requires specifying the split by column as well. Please "
              + "specify a column name using --split-by and try again.");
        }
      }

      importer.runImport(null, jarFile, splitCol, opts.getConf());
    }
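
The guard in importQuery() is easiest to see from the options side. A minimal sketch, assuming SqoopOptions' setSqlQuery/setSplitByCol/setNumMappers setters (names mirror the getters used above); the query and column are illustrative:

    import com.cloudera.sqoop.SqoopOptions;

    /** Hypothetical options for a parallel free-form query import. */
    public class QueryImportOptionsSketch {
      public static SqoopOptions build() {
        SqoopOptions opts = new SqoopOptions();
        opts.setConnectString("jdbc:mysql://db.example.com/corp"); // assumed URL
        // Sqoop replaces $CONDITIONS with per-split range predicates.
        opts.setSqlQuery("SELECT id, name FROM employees WHERE $CONDITIONS");
        opts.setSplitByCol("id"); // without this, numMappers > 1 fails above
        opts.setNumMappers(4);
        return opts;
      }
    }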

    // A later variant of importTable (as in newer Sqoop releases) that also
    // dispatches to HBase bulk-load and Accumulo jobs; signature and
    // tableName declaration restored for context.
    public void importTable(ImportJobContext context)
        throws IOException, ImportException {
      String tableName = context.getTableName();
      String jarFile = context.getJarFile();
      SqoopOptions opts = context.getOptions();

      context.setConnManager(this);

      ImportJobBase importer;
      if (opts.getHBaseTable() != null) {
        // Import to HBase.
        if (!HBaseUtil.isHBaseJarPresent()) {
          throw new ImportException("HBase jars are not present in "
              + "classpath, cannot import to HBase!");
        }
        if (!opts.isBulkLoadEnabled()) {
          importer = new HBaseImportJob(opts, context);
        } else {
          // Bulk load writes HFiles directly instead of issuing Puts.
          importer = new HBaseBulkImportJob(opts, context);
        }
      } else if (opts.getAccumuloTable() != null) {
        // Import to Accumulo.
        if (!AccumuloUtil.isAccumuloJarPresent()) {
          throw new ImportException("Accumulo jars are not present in "
              + "classpath, cannot import to Accumulo!");
        }
        importer = new AccumuloImportJob(opts, context);
      } else {
        // Import to HDFS.
        importer = new DataDrivenImportJob(opts, context.getInputFormat(),
            context);
      }

      checkTableImportOptions(context);

      String splitCol = getSplitColumn(opts, tableName);
      importer.runImport(tableName, jarFile, splitCol, opts.getConf());
    }
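
This variant is steered entirely by SqoopOptions. A hypothetical configuration for the HBase bulk-load branch; the setter names here are assumptions modeled on the getters used in the snippet (getHBaseTable(), isBulkLoadEnabled(), getAccumuloTable()), not confirmed API:

    import com.cloudera.sqoop.SqoopOptions;

    /** Hypothetical options steering the dispatch above (setter names assumed). */
    public class HBaseBulkImportOptionsSketch {
      public static SqoopOptions build() {
        SqoopOptions opts = new SqoopOptions();
        opts.setConnectString("jdbc:mysql://db.example.com/corp"); // assumed URL
        opts.setTableName("employees");
        opts.setHBaseTable("emp");          // non-null selects the HBase branch
        opts.setHBaseBulkLoadEnabled(true); // true selects HBaseBulkImportJob
        // Setting an Accumulo table instead (opts.setAccumuloTable("emp"))
        // while leaving the HBase table null selects AccumuloImportJob.
        return opts;
      }
    }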

    // The query-import counterpart of the variant above, with the same
    // HBase bulk-load and Accumulo dispatch; signature restored for context.
    public void importQuery(ImportJobContext context)
        throws IOException, ImportException {
      String jarFile = context.getJarFile();
      SqoopOptions opts = context.getOptions();

      context.setConnManager(this);

      ImportJobBase importer;
      if (opts.getHBaseTable() != null) {
        // Import to HBase.
        if (!HBaseUtil.isHBaseJarPresent()) {
          throw new ImportException("HBase jars are not present in classpath,"
              + " cannot import to HBase!");
        }
        if (!opts.isBulkLoadEnabled()) {
          importer = new HBaseImportJob(opts, context);
        } else {
          importer = new HBaseBulkImportJob(opts, context);
        }
      } else if (opts.getAccumuloTable() != null) {
        // Import to Accumulo.
        if (!AccumuloUtil.isAccumuloJarPresent()) {
          throw new ImportException("Accumulo jars are not present in classpath,"
              + " cannot import to Accumulo!");
        }
        importer = new AccumuloImportJob(opts, context);
      } else {
        // Import to HDFS.
        importer = new DataDrivenImportJob(opts, context.getInputFormat(),
            context);
      }

      String splitCol = getSplitColumn(opts, null);
      if (splitCol == null) {
        String boundaryQuery = opts.getBoundaryQuery();
        if (opts.getNumMappers() > 1) {
          // Can't infer a primary key.
          throw new ImportException("A split-by column must be specified for "
              + "parallel free-form query imports. Please specify one with "
              + "--split-by or perform a sequential import with '-m 1'.");
        } else if (boundaryQuery != null && !boundaryQuery.isEmpty()) {
          // Query import with a boundary query but no split column specified.
          throw new ImportException("Using a boundary query for a query based "
              + "import requires specifying the split by column as well. Please "
              + "specify a column name using --split-by and try again.");
        }
      }

      importer.runImport(null, jarFile, splitCol, opts.getConf());
    }
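
As the guard clauses show, a boundary query on a free-form import is only legal together with an explicit split column. A hedged fragment extending the query-options sketch above (setBoundaryQuery is assumed to mirror the getBoundaryQuery getter used in the snippet; the SQL is illustrative):

    // Both of these must be present together for a parallel query import:
    opts.setSqlQuery("SELECT id, name FROM employees WHERE $CONDITIONS");
    opts.setBoundaryQuery("SELECT MIN(id), MAX(id) FROM employees");
    opts.setSplitByCol("id"); // omit it and the boundary query is rejected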
