Package org.apache.hadoop.hive.common

Examples of org.apache.hadoop.hive.common.ValidTxnListImpl
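ValidTxnListImpl is Hive's concrete implementation of ValidTxnList, the snapshot that tells a reader which transactions it may treat as committed. As the snippets below show, it serializes to a colon-separated string of the form highWatermark:openTxn1:openTxn2:..., can be rebuilt from that string, and answers range queries such as isTxnRangeCommitted. A minimal sketch of that round trip, using illustrative values that do not come from any of the snippets on this page:

    import org.apache.hadoop.hive.common.ValidTxnList;
    import org.apache.hadoop.hive.common.ValidTxnListImpl;

    public class ValidTxnListRoundTrip {
      public static void main(String[] args) {
        // High water mark 10, transactions 4 and 7 still open (illustrative values).
        ValidTxnList txns = new ValidTxnListImpl("10:4:7");

        // The string form round-trips through toString() and the String constructor.
        ValidTxnList copy = new ValidTxnListImpl(txns.toString());

        System.out.println(copy.getHighWatermark());            // 10
        System.out.println(copy.getOpenTransactions().length);  // 2

        // A range entirely below the open transactions is fully committed.
        System.out.println(copy.isTxnRangeCommitted(1L, 3L));   // ALL
      }
    }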


    Assert.assertEquals(ValidTxnList.RangeResponse.NONE,
        validTxns.isTxnRangeCommitted(1L, 1L));
    Assert.assertEquals(ValidTxnList.RangeResponse.NONE,
        validTxns.isTxnRangeCommitted(5L, 10L));

    validTxns = new ValidTxnListImpl("10:4:5:6");
    Assert.assertEquals(ValidTxnList.RangeResponse.NONE,
        validTxns.isTxnRangeCommitted(4, 6));
    Assert.assertEquals(ValidTxnList.RangeResponse.ALL,
        validTxns.isTxnRangeCommitted(7, 10));
    Assert.assertEquals(ValidTxnList.RangeResponse.SOME,
View Full Code Here
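
The snippet above is cut off just before the SOME assertion. Reading the string "10:4:5:6" (high water mark 10, transactions 4, 5 and 6 open): NONE means every transaction in the queried range is still open, ALL means every one is committed, and SOME means the range mixes the two. A hedged sketch of a range that would return SOME; the exact range used in the original test is not shown here:

    ValidTxnList txns = new ValidTxnListImpl("10:4:5:6");
    // 3 and 7 are committed while 4..6 are open, so the range is only partially committed.
    ValidTxnList.RangeResponse r = txns.isTxnRangeCommitted(3, 7);  // SOME (assumed example range)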


  }

  @Test
  public void stringifyValidTxns() throws Exception {
    // Test with just high water mark
    ValidTxnList validTxns = new ValidTxnListImpl("1:");
    String asString = validTxns.toString();
    Assert.assertEquals("1:", asString);
    validTxns = new ValidTxnListImpl(asString);
    Assert.assertEquals(1, validTxns.getHighWatermark());
    Assert.assertNotNull(validTxns.getOpenTransactions());
    Assert.assertEquals(0, validTxns.getOpenTransactions().length);
    asString = validTxns.toString();
    Assert.assertEquals("1:", asString);
    validTxns = new ValidTxnListImpl(asString);
    Assert.assertEquals(1, validTxns.getHighWatermark());
    Assert.assertNotNull(validTxns.getOpenTransactions());
    Assert.assertEquals(0, validTxns.getOpenTransactions().length);

    // Test with open transactions
    validTxns = new ValidTxnListImpl("10:5:3");
    asString = validTxns.toString();
    if (!asString.equals("10:3:5") && !asString.equals("10:5:3")) {
      Assert.fail("Unexpected string value " + asString);
    }
    validTxns = new ValidTxnListImpl(asString);
    Assert.assertEquals(10, validTxns.getHighWatermark());
    Assert.assertNotNull(validTxns.getOpenTransactions());
    Assert.assertEquals(2, validTxns.getOpenTransactions().length);
    boolean sawThree = false, sawFive = false;
    for (long tid : validTxns.getOpenTransactions()) {
      if (tid == 3) sawThree = true;
      else if (tid == 5) sawFive = true;
      else Assert.fail("Unexpected value " + tid);
    }
    Assert.assertTrue(sawThree);
View Full Code Here

    // No-op
  }

  @Override
  public ValidTxnList getValidTxns() throws LockException {
    return new ValidTxnListImpl();
  }
View Full Code Here

      final String location = sd.getLocation();

      // Create a bogus validTxnList with a high water mark set to MAX_LONG and no open
      // transactions.  This assures that all deltas are treated as valid and all we return are
      // obsolete files.
      final ValidTxnList txnList = new ValidTxnListImpl();

      if (runJobAsSelf(ci.runAs)) {
        removeFiles(location, txnList);
      } else {
        LOG.info("Cleaning as user " + ci.runAs);
View Full Code Here
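
Per the comment in the snippet above, the no-argument constructor is expected to behave like a list with a high water mark of Long.MAX_VALUE and no open transactions. A minimal sketch of what that is assumed to be equivalent to, using the (exceptions, highWatermark) constructor that appears elsewhere on this page:

    // Assumed equivalent of new ValidTxnListImpl(): maximum high water mark,
    // nothing open, so every delta is considered valid.
    ValidTxnList everything = new ValidTxnListImpl(new long[0], Long.MAX_VALUE);
    // everything.getHighWatermark() == Long.MAX_VALUE
    // everything.getOpenTransactions().length == 0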

      // until it finishes.
      @SuppressWarnings("unchecked") // since there is no way to parameterize an instance of Class
      AcidInputFormat<WritableComparable, V> aif =
          instantiate(AcidInputFormat.class, jobConf.get(INPUT_FORMAT_CLASS_NAME));
      ValidTxnList txnList =
          new ValidTxnListImpl(jobConf.get(ValidTxnList.VALID_TXNS_KEY));

      boolean isMajor = jobConf.getBoolean(IS_MAJOR, false);
      AcidInputFormat.RawReader<V> reader =
          aif.getRawReader(jobConf, isMajor, split.getBucket(),
              txnList, split.getBaseDir(), split.getDeltaDirs());
View Full Code Here
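
The snippet rebuilds the transaction list from the string stored under ValidTxnList.VALID_TXNS_KEY. A hedged sketch of the producing side, assuming the job submitter simply puts the string form of a ValidTxnList into the job configuration before launching the compaction job:

    import org.apache.hadoop.mapred.JobConf;

    JobConf jobConf = new JobConf();
    // Assumed setup: serialize the snapshot so the compactor tasks can rebuild it.
    ValidTxnList validTxns = new ValidTxnListImpl("100:");   // illustrative snapshot
    jobConf.set(ValidTxnList.VALID_TXNS_KEY, validTxns.toString());

    // Later, inside the task (as in the snippet above):
    ValidTxnList txnList = new ValidTxnListImpl(jobConf.get(ValidTxnList.VALID_TXNS_KEY));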

    int i = 0;
    for(long txn: open) {
      if (currentTxn > 0 && currentTxn == txn) continue;
      exceptions[i++] = txn;
    }
    return new ValidTxnListImpl(exceptions, highWater);
  }
View Full Code Here
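
A hedged worked example of the loop above with concrete, purely illustrative values: with transactions 3, 5 and 7 open, a current transaction of 5, and a high water mark of 10, the current transaction is dropped from the exception list before the snapshot is built:

    long highWater = 10L;
    long currentTxn = 5L;
    long[] open = {3L, 5L, 7L};
    long[] exceptions = new long[open.length - 1];   // one slot fewer: currentTxn is skipped
    int i = 0;
    for (long txn : open) {
      if (currentTxn > 0 && currentTxn == txn) continue;
      exceptions[i++] = txn;
    }
    // exceptions == {3, 7}: the resulting list treats 3 and 7 as open and
    // everything else up to 10 as committed.
    ValidTxnList validTxns = new ValidTxnListImpl(exceptions, highWater);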

      bucket = (int) split.getStart();
      reader = null;
    }
    String txnString = conf.get(ValidTxnList.VALID_TXNS_KEY,
                                Long.MAX_VALUE + ":");
    ValidTxnList validTxnList = new ValidTxnListImpl(txnString);
    final OrcRawRecordMerger records =
        new OrcRawRecordMerger(conf, true, reader, split.isOriginal(), bucket,
            validTxnList, readOptions, deltas);
    return new RowReader<OrcStruct>() {
      OrcStruct innerRecord = records.createValue();
View Full Code Here
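
Both this snippet and the next fall back to Long.MAX_VALUE + ":" when ValidTxnList.VALID_TXNS_KEY is not set in the configuration. Under the string format used throughout this page, that default should parse to the maximum high water mark with no open transactions, i.e. every transaction is visible. A minimal sketch of that assumption:

    ValidTxnList defaultList = new ValidTxnListImpl(Long.MAX_VALUE + ":");
    // defaultList.getHighWatermark() == Long.MAX_VALUE
    // defaultList.getOpenTransactions().length == 0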

              .initialCapacity(cacheStripeDetailsSize).softValues().build();
        }
      }
      String value = conf.get(ValidTxnList.VALID_TXNS_KEY,
                              Long.MAX_VALUE + ":");
      transactionList = new ValidTxnListImpl(value);
    }
View Full Code Here

    tmpdir = new File(System.getProperty("java.io.tmpdir") + System.getProperty("file.separator") +
        "testFileSinkOperator");
    tmpdir.mkdir();
    tmpdir.deleteOnExit();
    txnList = new ValidTxnListImpl(new long[]{}, 2);
  }
View Full Code Here
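
The test setup above uses the (exceptions, highWatermark) constructor with an empty array and a high water mark of 2. Assuming it mirrors the string form, this should be equivalent to parsing "2:": only transactions 1 and 2 are visible and nothing is open. A small sketch of that assumed equivalence:

    ValidTxnList fromArray = new ValidTxnListImpl(new long[]{}, 2);
    ValidTxnList fromString = new ValidTxnListImpl("2:");
    // Both should report a high water mark of 2 and no open transactions.
    // fromArray.getHighWatermark() == fromString.getHighWatermark()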

        new MockFile("mock:/tbl/part1/random", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/_done", 0, new byte[0]),
        new MockFile("mock:/tbl/part1/subdir/000000_0", 0, new byte[0]));
    AcidUtils.Directory dir =
        AcidUtils.getAcidState(new MockPath(fs, "/tbl/part1"), conf,
            new ValidTxnListImpl("100:"));
    assertEquals(null, dir.getBaseDirectory());
    assertEquals(0, dir.getCurrentDirectories().size());
    assertEquals(0, dir.getObsolete().size());
    List<FileStatus> result = dir.getOriginalFiles();
    assertEquals(5, result.size());
View Full Code Here
