Package org.apache.hadoop.hive.ql.io

Examples of org.apache.hadoop.hive.ql.io.RecordIdentifier


        new OrcRawRecordMerger(conf, true, baseReader, false, BUCKET,
            createMaximalTxnList(), new Reader.Options(),
            AcidUtils.getPaths(directory.getCurrentDirectories()));
    assertEquals(null, merger.getMinKey());
    assertEquals(null, merger.getMaxKey());
    RecordIdentifier id = merger.createKey();
    OrcStruct event = merger.createValue();

    assertEquals(true, merger.next(id, event));
    assertEquals(OrcRecordUpdater.UPDATE_OPERATION,
        OrcRecordUpdater.getOperation(event));
View Full Code Here


        isTail = false;
        break;
      }
    }
    if (rowOffset > 0) {
      minKey = new RecordIdentifier(0, bucket, rowOffset - 1);
    }
    if (!isTail) {
      maxKey = new RecordIdentifier(0, bucket, rowOffset + rowLength - 1);
    }
  }
View Full Code Here

      boolean isMajor = jobConf.getBoolean(IS_MAJOR, false);
      AcidInputFormat.RawReader<V> reader =
          aif.getRawReader(jobConf, isMajor, split.getBucket(),
              txnList, split.getBaseDir(), split.getDeltaDirs());
      RecordIdentifier identifier = reader.createKey();
      V value = reader.createValue();
      getWriter(reporter, reader.getObjectInspector(), split.getBucket());
      while (reader.next(identifier, value)) {
        if (isMajor && reader.isDelete(value)) continue;
        writer.write(value);
View Full Code Here

    }
    RecordIdentifier[] result = new RecordIdentifier[stripes.length];
    for(int i=0; i < stripes.length; ++i) {
      if (stripes[i].length() != 0) {
        String[] parts = stripes[i].split(",");
        result[i] = new RecordIdentifier();
        result[i].setValues(Long.parseLong(parts[0]),
            Integer.parseInt(parts[1]), Long.parseLong(parts[2]));
      }
    }
    return result;
View Full Code Here

      case WITH_RECORD_ID:
        for (int i = 0; i < 10; i++) {
          rows.add(
              new TFSORow(
                  new Text("its fleect was white as snow"),
                  new RecordIdentifier(1, 1, i)
              )
          );
        }
        break;

      case WITH_PARTITION_VALUE:
        for (int i = 0; i < 10; i++) {
          rows.add(
              new TFSORow(
                  new Text("its fleect was white as snow"),
                  (i < 5) ? new Text("Monday") : new Text("Tuesday")
              )
          );
        }
        break;

      case WITH_RECORD_ID_AND_PARTITION_VALUE:
        for (int i = 0; i < 10; i++) {
          rows.add(
              new TFSORow(
                  new Text("its fleect was white as snow"),
                  (i < 5) ? new Text("Monday") : new Text("Tuesday"),
                  new RecordIdentifier(1, 1, i)
              )
          );
        }
        break;
View Full Code Here

        partVal = new Text();
        partVal.readFields(dataInput);
      }
      notNull = dataInput.readBoolean();
      if (notNull) {
        recId = new RecordIdentifier();
        recId.readFields(dataInput);
      }
    }
View Full Code Here

        fs.mkdirs(dir);
        partFile = AcidUtils.createBucketFile(dir, bucket);
      }
      FSDataOutputStream out = fs.create(partFile);
      for (int i = 0; i < numRecords; i++) {
        RecordIdentifier ri = new RecordIdentifier(maxTxn - 1, bucket, i);
        ri.write(out);
        out.writeBytes("mary had a little lamb its fleece was white as snow\n");
      }
      out.close();
    }
  }
View Full Code Here

      return true;
    }

    @Override
    public RecordIdentifier createKey() {
      return new RecordIdentifier();
    }
View Full Code Here

      col1 = new Text(val);
    }

    MyRow(String val, long rowId, long origTxn, int bucket) {
      col1 = new Text(val);
      ROW__ID = new RecordIdentifier(origTxn, bucket, rowId);
    }
View Full Code Here

      this.myint = myint;
      this.mylong = mylong;
      this.mytext = new Text(mytext);
      this.myfloat = myfloat;
      this.mydouble = mydouble;
      ROW__ID = new RecordIdentifier(origTxn, bucket, rowId);
    }
View Full Code Here

TOP

Related Classes of org.apache.hadoop.hive.ql.io.RecordIdentifier

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., now owned by Oracle Inc. Contact coftware#gmail.com.