Package org.apache.hadoop.hive.ql

Examples of org.apache.hadoop.hive.ql.QueryPlan
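The snippets below are taken from Hive's own tests and show the three main ways QueryPlan is used: handing a mock plan to the transaction manager to drive lock acquisition, constructing a plan directly from a semantic analyzer, and pulling the compiled plan out of the Driver to inspect column access, read entities, and root tasks.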


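Lock acquisition for a DELETE: a mock QueryPlan is built around a DELETE WriteEntity, a transaction is opened, and acquireLocks() is expected to produce exactly one lock containing one lock component.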

  @Test
  public void testDelete() throws Exception {
    WriteEntity we = addTableOutput(WriteEntity.WriteType.DELETE);
    QueryPlan qp = new MockQueryPlan(this);
    txnMgr.openTxn("fred");
    txnMgr.acquireLocks(qp, ctx, "fred");
    List<HiveLock> locks = ctx.getHiveLocks();
    Assert.assertEquals(1, locks.size());
    Assert.assertEquals(1,
        TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
  }


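testRollback starts from the same DELETE setup; the excerpt covers the lock-acquisition checks, which are identical to the DELETE case.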

  @Test
  public void testRollback() throws Exception {
    WriteEntity we = addTableOutput(WriteEntity.WriteType.DELETE);
    QueryPlan qp = new MockQueryPlan(this);
    txnMgr.openTxn("fred");
    txnMgr.acquireLocks(qp, ctx, "fred");
    List<HiveLock> locks = ctx.getHiveLocks();
    Assert.assertEquals(1, locks.size());
    Assert.assertEquals(1,
        TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
  }

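An exclusive DDL write (DDL_EXCLUSIVE) acquires its lock without an open transaction; again a single lock with a single component is expected.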

  @Test
  public void testDDLExclusive() throws Exception {
    WriteEntity we = addTableOutput(WriteEntity.WriteType.DDL_EXCLUSIVE);
    QueryPlan qp = new MockQueryPlan(this);
    txnMgr.acquireLocks(qp, ctx, "fred");
    List<HiveLock> locks = ctx.getHiveLocks();
    Assert.assertEquals(1, locks.size());
    Assert.assertEquals(1,
        TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
  }

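The shared-DDL case differs only in the write type (DDL_SHARED).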

  @Test
  public void testDDLShared() throws Exception {
    WriteEntity we = addTableOutput(WriteEntity.WriteType.DDL_SHARED);
    QueryPlan qp = new MockQueryPlan(this);
    txnMgr.acquireLocks(qp, ctx, "fred");
    List<HiveLock> locks = ctx.getHiveLocks();
    Assert.assertEquals(1, locks.size());
    Assert.assertEquals(1,
        TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
  }

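For DDL_NO_LOCK no lock is taken at all, so the context reports no locks.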

  @Test
  public void testDDLNoLock() throws Exception {
    WriteEntity we = addTableOutput(WriteEntity.WriteType.DDL_NO_LOCK);
    QueryPlan qp = new MockQueryPlan(this);
    txnMgr.acquireLocks(qp, ctx, "fred");
    List<HiveLock> locks = ctx.getHiveLocks();
    Assert.assertNull(locks);
  }
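Constructing a QueryPlan directly: once the semantic analyzer has analyzed and validated the parse tree, a plan is built from the query string and the analyzer and returned for further inspection.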

    db.createPartition(u, partVals);
    sem.analyze(tree, ctx);
    // validate the plan
    sem.validate();

    QueryPlan plan = new QueryPlan(query, sem, 0L, testName, null);

    return new ReturnInfo(tree, sem, plan);
  }
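A plain SELECT over t1: after compiling through the Driver, both the plan's ColumnAccessInfo and its read entities should report the two columns id1 and name1 for default@t1.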

  public void testQueryTable1() throws ParseException {
    String query = "select * from t1";
    Driver driver = createDriver();
    int rc = driver.compile(query);
    Assert.assertEquals("Checking command success", 0, rc);
    QueryPlan plan = driver.getPlan();
    // check access columns from ColumnAccessInfo
    ColumnAccessInfo columnAccessInfo = plan.getColumnAccessInfo();
    List<String> cols = columnAccessInfo.getTableToColumnAccessMap().get("default@t1");
    Assert.assertNotNull(cols);
    Assert.assertEquals(2, cols.size());
    Assert.assertTrue(cols.contains("id1"));
    Assert.assertTrue(cols.contains("name1"));

    // check access columns from readEntity
    Map<String, List<String>> tableColsMap = getColsFromReadEntity(plan.getInputs());
    cols = tableColsMap.get("default@t1");
    Assert.assertNotNull(cols);
    Assert.assertEquals(2, cols.size());
    Assert.assertTrue(cols.contains("id1"));
    Assert.assertTrue(cols.contains("name1"));
  }
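A join of t1 and t2: column access is tracked per table, two columns for t1 and three for t2, and the read entities are cross-checked against the same expectations.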

  public void testJoinTable1AndTable2() throws ParseException {
    String query = "select * from t1 join t2 on (t1.id1 = t2.id1)";
    Driver driver = createDriver();
    int rc = driver.compile(query);
    Assert.assertEquals("Checking command success", 0, rc);
    QueryPlan plan = driver.getPlan();
    // check access columns from ColumnAccessInfo
    ColumnAccessInfo columnAccessInfo = plan.getColumnAccessInfo();
    List<String> cols = columnAccessInfo.getTableToColumnAccessMap().get("default@t1");
    Assert.assertNotNull(cols);
    Assert.assertEquals(2, cols.size());
    Assert.assertTrue(cols.contains("id1"));
    Assert.assertTrue(cols.contains("name1"));
    cols = columnAccessInfo.getTableToColumnAccessMap().get("default@t2");
    Assert.assertNotNull(cols);
    Assert.assertEquals(3, cols.size());
    Assert.assertTrue(cols.contains("id2"));
    Assert.assertTrue(cols.contains("id1"));
    Assert.assertTrue(cols.contains("name1"));


    // check access columns from readEntity
    Map<String, List<String>> tableColsMap = getColsFromReadEntity(plan.getInputs());
    cols = tableColsMap.get("default@t1");
    Assert.assertNotNull(cols);
    Assert.assertEquals(2, cols.size());
    Assert.assertTrue(cols.contains("id1"));
    Assert.assertTrue(cols.contains("name1"));
  }
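Joining the view v1 with t2: the view itself does not appear in the column-access map or among the read-entity columns; access is attributed to the underlying tables instead.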

  public void testJoinView1AndTable2() throws ParseException {
    String query = "select * from v1 join t2 on (v1.id1 = t2.id1)";
    Driver driver = createDriver();
    int rc = driver.compile(query);
    Assert.assertEquals("Checking command success", 0, rc);
    QueryPlan plan = driver.getPlan();
    // check access columns from ColumnAccessInfo
    ColumnAccessInfo columnAccessInfo = plan.getColumnAccessInfo();
    List<String> cols = columnAccessInfo.getTableToColumnAccessMap().get("default@v1");
    Assert.assertNull(cols);
    cols = columnAccessInfo.getTableToColumnAccessMap().get("default@t1");
    Assert.assertNotNull(cols);
    Assert.assertEquals(2, cols.size());
    Assert.assertTrue(cols.contains("id1"));
    Assert.assertTrue(cols.contains("name1"));
    cols = columnAccessInfo.getTableToColumnAccessMap().get("default@t2");
    Assert.assertNotNull(cols);
    Assert.assertEquals(3, cols.size());
    Assert.assertTrue(cols.contains("id2"));
    Assert.assertTrue(cols.contains("id1"));
    Assert.assertTrue(cols.contains("name1"));


    // check access columns from readEntity
    Map<String, List<String>> tableColsMap = getColsFromReadEntity(plan.getInputs());
    cols = tableColsMap.get("default@v1");
    Assert.assertNull(cols);
    cols = tableColsMap.get("default@t1");
    Assert.assertNotNull(cols);
    Assert.assertEquals(2, cols.size());
    Assert.assertTrue(cols.contains("id1"));
    Assert.assertTrue(cols.contains("name1"));
  }
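Running the compiled plan's first task by hand: the root MapRedTask is taken from the QueryPlan, its MapWork is used to resolve the input paths, and the map-reduce work is written into the job configuration through Utilities.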

      Context ctx = new Context(newJob);
      Path emptyScratchDir = ctx.getMRTmpPath();
      FileSystem fileSys = emptyScratchDir.getFileSystem(newJob);
      fileSys.mkdirs(emptyScratchDir);

      QueryPlan plan = drv.getPlan();
      MapRedTask selectTask = (MapRedTask)plan.getRootTasks().get(0);

      List<Path> inputPaths = Utilities.getInputPaths(newJob, selectTask.getWork().getMapWork(), emptyScratchDir, ctx, false);
      Utilities.setInputPaths(newJob, inputPaths);

      Utilities.setMapRedWork(newJob, selectTask.getWork(), ctx.getMRTmpPath());
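To tie the excerpts together, here is a minimal, self-contained sketch of the Driver-to-QueryPlan flow the tests above rely on. It is not taken from any of the snippets: the class name is a placeholder, the query reuses the t1 table from the examples above, and it assumes a HiveConf and metastore that are already set up.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.session.SessionState;

public class QueryPlanSketch {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();
    SessionState.start(conf);                      // the Driver needs an active session

    Driver driver = new Driver(conf);
    int rc = driver.compile("select * from t1");   // 0 means the query compiled successfully
    if (rc != 0) {
      throw new IllegalStateException("compile failed with code " + rc);
    }

    QueryPlan plan = driver.getPlan();
    System.out.println("query id:   " + plan.getQueryId());
    System.out.println("root tasks: " + plan.getRootTasks().size());
    System.out.println("inputs:     " + plan.getInputs());
  }
}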


