Package org.apache.hadoop.hive.ql.plan

Examples of org.apache.hadoop.hive.ql.plan.PartitionSpec$PredicateSpec

PartitionSpec, together with its inner PredicateSpec entries, is used by Hive's DDL semantic analysis to describe a partition selection that may contain comparison operators, for example ALTER TABLE ... DROP PARTITION (ds > '2011-01-01'). Each predicate is a (column, operator, value) triple added via addPredicate(); toString() renders the whole spec as a metastore filter expression, and getPartSpecWithoutOperator() returns the plain column/value map for exact lookups.

Example 1: building a list of PartitionSpec objects from the parsed DDL AST. The loop skips the first child (the table name in this DDL tree) and turns each TOK_PARTSPEC node into one PartitionSpec, with each of its TOK_PARTVAL children contributing one predicate.

    List<PartitionSpec> partSpecList = new ArrayList<PartitionSpec>();

    // Child 0 is the table name; every following TOK_PARTSPEC child becomes one PartitionSpec.
    for (int childIndex = 1; childIndex < ast.getChildCount(); childIndex++) {
      Tree partSpecTree = ast.getChild(childIndex);
      if (partSpecTree.getType() == HiveParser.TOK_PARTSPEC) {
        PartitionSpec partSpec = new PartitionSpec();

        // Each TOK_PARTVAL child carries a (column, operator, value) triple, e.g. ds > '2011-01-01'.
        for (int i = 0; i < partSpecTree.getChildCount(); ++i) {
          CommonTree partSpecSingleKey = (CommonTree) partSpecTree.getChild(i);
          assert (partSpecSingleKey.getType() == HiveParser.TOK_PARTVAL);
          String key = partSpecSingleKey.getChild(0).getText().toLowerCase();
          String operator = partSpecSingleKey.getChild(1).getText();
          String val = partSpecSingleKey.getChild(2).getText();
          partSpec.addPredicate(key, operator, val);
        }

        partSpecList.add(partSpec);
      }
    }

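The excerpts on this page use only three members of PartitionSpec: addPredicate(), toString(), and getPartSpecWithoutOperator(). The stand-in below is a hypothetical sketch, not the real Hive class (the class and field names here are invented), showing one plausible way those three pieces fit together; the actual class represents each predicate with its inner PredicateSpec class.

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    // Hypothetical stand-in for illustration only; not the real Hive class.
    public class SimplePartitionSpec {

      // Plays the role of the inner PredicateSpec: one (column, operator, value) predicate.
      private static class Predicate {
        final String key;
        final String operator;
        final String value;

        Predicate(String key, String operator, String value) {
          this.key = key;
          this.operator = operator;
          this.value = value;
        }
      }

      private final List<Predicate> predicates = new ArrayList<Predicate>();

      public void addPredicate(String key, String operator, String value) {
        predicates.add(new Predicate(key, operator, value));
      }

      // Renders the predicates as a metastore-style filter expression. Values are appended
      // as-is: in the excerpts above the literal text (quotes included) comes straight from the AST.
      @Override
      public String toString() {
        StringBuilder sb = new StringBuilder();
        for (Predicate p : predicates) {
          if (sb.length() > 0) {
            sb.append(" AND ");
          }
          sb.append(p.key).append(' ').append(p.operator).append(' ').append(p.value);
        }
        return sb.toString();
      }

      // Drops the operators and keeps only column -> value, for exact partition lookups.
      public Map<String, String> getPartSpecWithoutOperator() {
        Map<String, String> spec = new LinkedHashMap<String, String>();
        for (Predicate p : predicates) {
          spec.put(p.key, p.value);
        }
        return spec;
      }
    }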

Example 2: resolving each PartitionSpec against the metastore. When every partition column is a string, the spec is rendered as a filter expression and passed to getPartitionsByFilter(); otherwise the operator-free key/value map is used for an exact getPartitions() lookup. Partitions that are protected against dropping (p.canDrop() returns false) trigger a SemanticException; the excerpt is cut off inside that throw.

    Table tab = getTable(tblName);

    Iterator<PartitionSpec> i;
    int index;
    for (i = partSpecs.iterator(), index = 1; i.hasNext(); ++index) {
      PartitionSpec partSpec = i.next();
      List<Partition> parts = null;
      if (stringPartitionColumns) {
        // All partition columns are strings: push the predicates down as a filter expression.
        try {
          parts = db.getPartitionsByFilter(tab, partSpec.toString());
        } catch (Exception e) {
          throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
        }
      }
      else {
        // Non-string partition columns: match exactly on the operator-free key/value map.
        try {
          parts = db.getPartitions(tab, partSpec.getPartSpecWithoutOperator());
        } catch (Exception e) {
          throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
        }
      }

      if (parts.isEmpty()) {
        if (throwIfNonExistent) {
          throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()));
        }
      }
      for (Partition p : parts) {
        if (!ignoreProtection && !p.canDrop()) {
          throw new SemanticException(

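The stringPartitionColumns flag is not computed inside this excerpt. As an assumption rather than a fact about the analyzer, a plausible derivation is that the filter path is only usable when every partition column of the table is declared as a string; the helper below (a made-up name) sketches that check using Table.getPartCols().

    import java.util.List;

    import org.apache.hadoop.hive.metastore.api.FieldSchema;
    import org.apache.hadoop.hive.ql.metadata.Table;

    public final class PartitionColumnTypes {

      private PartitionColumnTypes() {
      }

      // Returns true only if every partition column of the table is declared as a string.
      // Non-string partition columns cannot safely go through the string filter comparison,
      // which is why the excerpt above falls back to an exact key/value lookup for them.
      public static boolean allPartitionColumnsAreStrings(Table tab) {
        List<FieldSchema> partCols = tab.getPartCols();
        for (FieldSchema col : partCols) {
          if (!"string".equalsIgnoreCase(col.getType())) {
            return false;
          }
        }
        return true;
      }
    }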

Example 3: the same resolution loop, used where the matched partitions are registered as write entities (outputs) of the operation rather than checked for drop protection; only the final loop body differs from Example 2.

    Iterator<PartitionSpec> i;
    int index;
    for (i = partSpecs.iterator(), index = 1; i.hasNext(); ++index) {
      PartitionSpec partSpec = i.next();
      List<Partition> parts = null;
      if (stringPartitionColumns) {
        try {
          parts = db.getPartitionsByFilter(tab, partSpec.toString());
        } catch (Exception e) {
          throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
        }
      }
      else {
        try {
          parts = db.getPartitions(tab, partSpec.getPartSpecWithoutOperator());
        } catch (Exception e) {
          throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
        }
      }

      if (parts.isEmpty()) {
        if (throwIfNonExistent) {
          throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()));
        }
      }
      for (Partition p : parts) {
        outputs.add(new WriteEntity(p));
      }

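The lookup branch above is repeated verbatim across these excerpts. Purely as a sketch, not Hive's actual code (and note that the package of ErrorMsg has moved between Hive versions), the branch could be factored into a small helper like this:

    import java.util.List;

    import org.apache.hadoop.hive.ql.ErrorMsg;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.Partition;
    import org.apache.hadoop.hive.ql.metadata.Table;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.plan.PartitionSpec;

    public final class PartitionSpecResolver {

      private PartitionSpecResolver() {
      }

      // Factors out the lookup branch repeated in the excerpts above. Sketch only; the
      // analyzer keeps this logic inline rather than in a helper like this.
      public static List<Partition> resolve(Hive db, Table tab, PartitionSpec partSpec,
          boolean stringPartitionColumns) throws SemanticException {
        try {
          if (stringPartitionColumns) {
            // Operators such as < or >= can only be pushed to the metastore as a filter string.
            return db.getPartitionsByFilter(tab, partSpec.toString());
          }
          // Otherwise match exactly on the operator-free key/value map.
          return db.getPartitions(tab, partSpec.getPartSpecWithoutOperator());
        } catch (Exception e) {
          throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
        }
      }
    }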

Example 4: a simpler variant that always resolves the spec through the metastore filter API, regardless of the partition column types.

    Iterator<PartitionSpec> i;
    int index;
    for (i = partSpecs.iterator(), index = 1; i.hasNext(); ++index) {
      PartitionSpec partSpec = i.next();
      List<Partition> parts = null;
      try {
        parts = db.getPartitionsByFilter(tab, partSpec.toString());
      } catch (Exception e) {
        throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
      }
      if (parts.isEmpty()) {
        if (throwIfNonExistent) {
          throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()));
        }
      }
      for (Partition p : parts) {
        outputs.add(new WriteEntity(p));
      }

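For reference, a tiny driver for the hypothetical SimplePartitionSpec stand-in sketched under Example 1, showing the filter form consumed by getPartitionsByFilter() and the operator-free map consumed by getPartitions(); the real PartitionSpec may format or quote values differently.

    public class SimplePartitionSpecDemo {

      public static void main(String[] args) {
        SimplePartitionSpec spec = new SimplePartitionSpec();
        spec.addPredicate("ds", ">", "'2011-01-01'");
        spec.addPredicate("hr", "=", "'12'");

        // Filter form, analogous to the argument of getPartitionsByFilter(tab, partSpec.toString()):
        // prints: ds > '2011-01-01' AND hr = '12'
        System.out.println(spec);

        // Operator-free form, analogous to getPartitions(tab, partSpec.getPartSpecWithoutOperator()):
        // prints: {ds='2011-01-01', hr='12'}
        System.out.println(spec.getPartSpecWithoutOperator());
      }
    }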