Package org.apache.hive.hcatalog.data.schema

Examples of org.apache.hive.hcatalog.data.schema.HCatFieldSchema
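A minimal, self-contained sketch (not taken from the snippets below) of what the class represents: an HCatFieldSchema pairs a field name with an HCatalog type and an optional comment, and a list of them forms an HCatSchema. It only uses the constructor and accessors that the snippets themselves rely on.

import java.util.ArrayList;
import java.util.List;

import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema.Type;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

public class HCatFieldSchemaBasics {
  public static void main(String[] args) throws HCatException {
    List<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
    cols.add(new HCatFieldSchema("id", Type.INT, "primary key"));
    cols.add(new HCatFieldSchema("name", Type.STRING, "display name"));

    // Group the fields into a schema and look them up by name.
    HCatSchema schema = new HCatSchema(cols);
    for (String fieldName : schema.getFieldNames()) {
      HCatFieldSchema hfs = schema.get(fieldName);
      System.out.println(hfs.getName() + " -> " + hfs.getTypeString());
    }
  }
}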


        }
      }
      if (skip) {
        continue;
      }
      HCatFieldSchema hfs = hCatFullTableSchema.get(hfn);
      if (debugHCatImportMapper) {
        LOG.debug("SqoopRecordVal: field = " + key + " Val " + val
          + " of type " + (val == null ? null : val.getClass().getName())
          + ", hcattype " + hfs.getTypeString());
      }
      Object hCatVal = toHCat(val, hfs);

      result.set(hfn, hCatFullTableSchema, hCatVal);
    }
View Full Code Here
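In the loop above, toHCat(val, hfs) converts a record value into the object HCatalog expects for that field before result.set(...) stores it. A much-simplified, hypothetical converter along the same lines (not Sqoop's actual implementation) could switch on HCatFieldSchema.getType():

import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;

public final class SimpleHCatConverter {
  private SimpleHCatConverter() { }

  // Convert a Java value to a representation matching the HCatalog field type.
  public static Object toHCat(Object val, HCatFieldSchema hfs) {
    if (val == null) {
      return null;
    }
    switch (hfs.getType()) {
      case INT:
        return ((Number) val).intValue();
      case BIGINT:
        return ((Number) val).longValue();
      case STRING:
        return val.toString();
      default:
        // Pass anything else through unchanged and let HCatalog validate it.
        return val;
    }
  }
}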


    LOG.info("HCatalog table partitioning key fields = "
      + Arrays.toString(hCatPartitionSchema.getFieldNames().toArray()));

    List<HCatFieldSchema> outputFieldList = new ArrayList<HCatFieldSchema>();
    for (String col : dbColumnNames) {
      HCatFieldSchema hfs = hCatFullTableSchema.get(col);
      if (hfs == null) {
        throw new IOException("Database column " + col + " not found in "
          + " hcatalog table.");
      }
      if (hCatStaticPartitionKeys != null
View Full Code Here
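The snippet is cut off at the static-partition check. A hedged guess at how that branch continues (assuming hCatStaticPartitionKeys is a collection of the static partition column names): skip those columns and collect the field schema for everything else.

      if (hCatStaticPartitionKeys != null
          && hCatStaticPartitionKeys.contains(col)) {
        // static partition values are supplied once per job, not per field
        continue;
      }
      outputFieldList.add(hfs);
    }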

      if (!found) {
        throw new IOException("Database column " + col + " not found in "
          + "hcatalog table schema or partition schema");
      }
      if (!userMapped) {
        HCatFieldSchema hCatFS = hCatFullTableSchema.get(col);
        if (!hCatFS.getTypeString().equals(hCatColType)) {
          LOG.warn("The HCatalog field " + col + " has type "
            + hCatFS.getTypeString() + ".  Expected = " + hCatColType
            + " based on database column type : "
            + sqlTypeString(colInfo.get(0)));
          LOG.warn("The Sqoop job can fail if types are not "
            + " assignment compatible");
        }
View Full Code Here
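The warning above fires when the declared HCatalog type differs from the type implied by the database column. A hypothetical illustration of that comparison for a single column (the JDBC-to-HCatalog mapping here is an assumption, not Sqoop's full type table):

import java.sql.Types;

import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;

public final class HCatTypeCheck {
  private HCatTypeCheck() { }

  // Assumed mapping from a few JDBC type codes to HCatalog type strings.
  static String expectedHCatType(int sqlType) {
    switch (sqlType) {
      case Types.INTEGER: return "int";
      case Types.BIGINT:  return "bigint";
      case Types.VARCHAR: return "string";
      default:            return "string";
    }
  }

  // True when the table's declared type matches what the database implies.
  static boolean typesAgree(HCatFieldSchema hCatFS, int sqlType) {
    return hCatFS.getTypeString().equals(expectedHCatType(sqlType));
  }
}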

    String warehouseDir = System
      .getProperty("test.warehouse.dir", "/user/hive/warehouse");
    String expectedDir = fixPath(warehouseDir).replaceFirst("pfile:///", "pfile:/");
    assertEquals(expectedDir + "/" + db + ".db", testDb.getLocation());
    ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
    cols.add(new HCatFieldSchema("id", Type.INT, "id comment"));
    cols.add(new HCatFieldSchema("value", Type.STRING, "value comment"));
    HCatCreateTableDesc tableDesc = HCatCreateTableDesc
      .create(db, tableOne, cols).fileFormat("rcfile").build();
    client.createTable(tableDesc);
    HCatTable table1 = client.getTable(db, tableOne);
    assertTrue(table1.getInputFileFormat().equalsIgnoreCase(
View Full Code Here
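A condensed, self-contained variant of the flow in the test above, assuming a reachable metastore picked up from the default HiveConf; the database, table and column names are placeholders.

import java.util.ArrayList;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.hcatalog.api.HCatClient;
import org.apache.hive.hcatalog.api.HCatCreateTableDesc;
import org.apache.hive.hcatalog.api.HCatTable;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema.Type;

public class CreateRcfileTableExample {
  public static void main(String[] args) throws Exception {
    HCatClient client = HCatClient.create(new HiveConf());
    try {
      ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
      cols.add(new HCatFieldSchema("id", Type.INT, "id comment"));
      cols.add(new HCatFieldSchema("value", Type.STRING, "value comment"));

      HCatCreateTableDesc tableDesc = HCatCreateTableDesc
        .create("default", "example_table", cols).fileFormat("rcfile").build();
      client.createTable(tableDesc);

      // Read the table back and confirm the chosen storage format.
      HCatTable created = client.getTable("default", "example_table");
      System.out.println("Input format: " + created.getInputFileFormat());
    } finally {
      client.close();
    }
  }
}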

    HCatCreateDBDesc dbDesc = HCatCreateDBDesc.create(dbName)
      .ifNotExists(true).build();
    client.createDatabase(dbDesc);
    ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
    cols.add(new HCatFieldSchema("userid", Type.INT, "id columns"));
    cols.add(new HCatFieldSchema("viewtime", Type.BIGINT,
      "view time columns"));
    cols.add(new HCatFieldSchema("pageurl", Type.STRING, ""));
    cols.add(new HCatFieldSchema("ip", Type.STRING,
      "IP Address of the User"));

    ArrayList<HCatFieldSchema> ptnCols = new ArrayList<HCatFieldSchema>();
    ptnCols.add(new HCatFieldSchema("dt", Type.STRING, "date column"));
    ptnCols.add(new HCatFieldSchema("country", Type.STRING,
      "country column"));
    HCatCreateTableDesc tableDesc = HCatCreateTableDesc
      .create(dbName, tableName, cols).fileFormat("sequencefile")
      .partCols(ptnCols).build();
    client.createTable(tableDesc);
View Full Code Here
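A short follow-up sketch, reusing the client, dbName and tableName from the snippet above: the partition key columns declared via partCols(ptnCols) can be read back as HCatFieldSchema objects.

    HCatTable table = client.getTable(dbName, tableName);
    for (HCatFieldSchema ptnCol : table.getPartCols()) {
      System.out.println("partition column: " + ptnCol.getName()
        + " (" + ptnCol.getTypeString() + ")");
    }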

    String cloneTable = "tabletwo";
    client.dropTable(null, tableName, true);
    client.dropTable(null, cloneTable, true);

    ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
    cols.add(new HCatFieldSchema("id", Type.INT, "id columns"));
    cols.add(new HCatFieldSchema("value", Type.STRING, "id columns"));
    HCatCreateTableDesc tableDesc = HCatCreateTableDesc
      .create(null, tableName, cols).fileFormat("rcfile").build();
    client.createTable(tableDesc);
    // create a new table similar to previous one.
    client.createTableLike(null, tableName, cloneTable, true, false, null);
View Full Code Here
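A possible continuation of this test (assuming, as in the drop calls above, that a null database name resolves to the default database): the clone created by createTableLike should expose the same number of HCatFieldSchema columns as the original.

    HCatTable original = client.getTable(null, tableName);
    HCatTable clone = client.getTable(null, cloneTable);
    assertEquals(original.getCols().size(), clone.getCols().size());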

    String tableName = "temptable";
    String newName = "mytable";
    client.dropTable(null, tableName, true);
    client.dropTable(null, newName, true);
    ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
    cols.add(new HCatFieldSchema("id", Type.INT, "id columns"));
    cols.add(new HCatFieldSchema("value", Type.STRING, "id columns"));
    HCatCreateTableDesc tableDesc = HCatCreateTableDesc
      .create(null, tableName, cols).fileFormat("rcfile").build();
    client.createTable(tableDesc);
    client.renameTable(null, tableName, newName);
    try {
View Full Code Here
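A hedged guess at what the truncated try block verifies: after renameTable, looking up the old name should fail while the new name resolves.

    try {
      client.getTable(null, tableName);
      fail("Expected lookup of the old table name to fail after the rename");
    } catch (HCatException e) {
      // expected: the old name no longer exists
    }
    HCatTable renamed = client.getTable(null, newName);
    assertEquals(newName, renamed.getTableName());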
