Examples of IOContext


Examples of org.apache.hadoop.hive.ql.io.IOContext

  }

  private void populateVirtualColumnValues() {
    if (this.vcs != null) {
      ExecMapperContext mapExecCxt = this.getExecContext();
      IOContext ioCxt = mapExecCxt.getIoCxt();
      for (int i = 0; i < vcs.size(); i++) {
        VirtualColumn vc = vcs.get(i);
        if (vc.equals(VirtualColumn.FILENAME) && mapExecCxt.inputFileChanged()) {
          this.vcValues[i] = new Text(mapExecCxt.getCurrentInputFile());
        } else if (vc.equals(VirtualColumn.BLOCKOFFSET)) {
          long current = ioCxt.getCurrentBlockStart();
          LongWritable old = (LongWritable) this.vcValues[i];
          if (old == null) {
            old = new LongWritable(current);
            this.vcValues[i] = old;
            continue;
          }
          if (current != old.get()) {
            old.set(current);
          }
        } else if (vc.equals(VirtualColumn.ROWOFFSET)) {
          long current = ioCxt.getCurrentRow();
          LongWritable old = (LongWritable) this.vcValues[i];
          if (old == null) {
            old = new LongWritable(current);
            this.vcValues[i] = old;
            continue;
          }
          // Mirrors the BLOCKOFFSET branch: update the cached Writable in place.
          if (current != old.get()) {
            old.set(current);
          }
        }
      }
    }
  }
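
The BLOCKOFFSET and ROWOFFSET branches above reuse one LongWritable per virtual column instead of allocating a new object for every row. A minimal standalone sketch of that reuse pattern (plain Hadoop LongWritable, no Hive classes; the class and method names are placeholders) might look like:

import org.apache.hadoop.io.LongWritable;

public class ReusableOffsetValue {
  // Cached Writable, reused across rows to avoid per-row allocation.
  private LongWritable cached;

  // Returns a LongWritable holding 'current', updating the cached
  // instance in place when the value changes.
  public LongWritable valueFor(long current) {
    if (cached == null) {
      cached = new LongWritable(current);
    } else if (current != cached.get()) {
      cached.set(current);
    }
    return cached;
  }
}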

Examples of org.apache.lucene.store.IOContext

    // what we use in RAM:
    long bytes =  totalPostings * 8 + totalPayloadBytes;

    SegmentWriteState writeState = new SegmentWriteState(null, dir,
                                                         segmentInfo, newFieldInfos,
                                                         32, null, new IOContext(new FlushInfo(maxDoc, bytes)));
    FieldsConsumer fieldsConsumer = codec.postingsFormat().fieldsConsumer(writeState);

    for(Map.Entry<String,Map<BytesRef,Long>> fieldEnt : fields.entrySet()) {
      String field = fieldEnt.getKey();
      Map<BytesRef,Long> terms = fieldEnt.getValue();
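
The example above builds an IOContext from a FlushInfo so the Directory knows the write is a segment flush and roughly how large it will be. A minimal standalone sketch of that construction (Lucene 4.x-era API; the directory, file name, and size estimates are placeholders) might be:

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FlushInfo;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;

public class FlushContextSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder estimates; the snippet above derives them from
    // maxDoc and the posting/payload byte counts it tracked.
    int numDocs = 1000;
    long estimatedBytes = 1L << 20;

    // FlushInfo describes a segment flush: doc count plus estimated size.
    IOContext flushContext = new IOContext(new FlushInfo(numDocs, estimatedBytes));

    Directory dir = new RAMDirectory();
    IndexOutput out = dir.createOutput("_0_sketch.dat", flushContext);
    out.writeByte((byte) 0);
    out.close();
    dir.close();
  }
}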

Examples of org.apache.lucene.store.IOContext

   }


   private SegmentCommitInfo merge(Directory dir, SegmentCommitInfo si1, SegmentCommitInfo si2, String merged, boolean useCompoundFile)
   throws Exception {
      IOContext context = newIOContext(random());
      SegmentReader r1 = new SegmentReader(si1, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, context);
      SegmentReader r2 = new SegmentReader(si2, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, context);

      final Codec codec = Codec.getDefault();
      TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.info.dir);

Examples of org.apache.lucene.store.IOContext

        // docids to update.
        SegmentInfos segments = new SegmentInfos();
        segments.read(dir);

        for (SegmentInfoPerCommit si : segments) {
            SegmentReader reader = new SegmentReader(si, 1, new IOContext());
            BitSet docsToUpdate = getMatchingDocs(updatequery, reader);
            updateSegment(si.info.name, docsToUpdate, diff);
        }

    }

Examples of org.apache.lucene.store.IOContext

      }
    }
    FST<T> fst = builder.finish();

    if (random.nextBoolean() && fst != null && !willRewrite) {
      IOContext context = LuceneTestCase.newIOContext(random);
      IndexOutput out = dir.createOutput("fst.bin", context);
      fst.save(out);
      out.close();
      IndexInput in = dir.openInput("fst.bin", context);
      try {

Examples of org.apache.lucene.store.IOContext

  private final int version;

  final String segment;
  public VariableGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, String segment, int indexDivisor, String segmentSuffix, IOContext context)
    throws IOException {
    in = dir.openInput(IndexFileNames.segmentFileName(segment, segmentSuffix, VariableGapTermsIndexWriter.TERMS_INDEX_EXTENSION), new IOContext(context, true));
    this.segment = segment;
    boolean success = false;
    assert indexDivisor == -1 || indexDivisor > 0;

    try {

Examples of org.apache.lucene.store.IOContext

    assert numDocsInRAM > 0;
    assert deleteSlice.isEmpty() : "all deletes must be applied in prepareFlush";
    segmentInfo.setDocCount(numDocsInRAM);
    final SegmentWriteState flushState = new SegmentWriteState(infoStream, directory, segmentInfo, fieldInfos.finish(),
        indexWriterConfig.getTermIndexInterval(),
        pendingUpdates, new IOContext(new FlushInfo(numDocsInRAM, bytesUsed())));
    final double startMBUsed = bytesUsed() / 1024. / 1024.;

    // Apply delete-by-docID now (delete-byDocID only
    // happens when an exception is hit processing that
    // doc, eg if analyzer has some problem w/ the text):

Examples of org.apache.lucene.store.IOContext

    SegmentCommitInfo newSegment = flushedSegment.segmentInfo;

    IndexWriter.setDiagnostics(newSegment.info, IndexWriter.SOURCE_FLUSH);
   
    IOContext context = new IOContext(new FlushInfo(newSegment.info.getDocCount(), newSegment.sizeInBytes()));

    boolean success = false;
    try {
     
      if (indexWriterConfig.getUseCompoundFile()) {

Examples of org.apache.lucene.store.IOContext

            if (infoStream.isEnabled("IW")) {
              infoStream.message("IW", "addIndexes: process segment origName=" + info.info.name + " newName=" + newSegName + " info=" + info);
            }

            IOContext context = new IOContext(new MergeInfo(info.info.getDocCount(), info.sizeInBytes(), true, -1));

            for(FieldInfo fi : SegmentReader.readFieldInfos(info)) {
              globalFieldNumberMap.addOrGet(fi.name, fi.number, fi.getDocValuesType());
            }
            infos.add(copySegmentAsIs(info, newSegName, dsNames, dsFilesCopied, context, copiedFiles));
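
Here the IOContext is built from a MergeInfo rather than a FlushInfo, telling the Directory that the files are being copied as part of a merge/addIndexes of a given size. A hedged sketch of the different ways these snippets construct an IOContext (Lucene 4.x-era API; the numbers are placeholders):

import org.apache.lucene.store.FlushInfo;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.MergeInfo;

public class IOContextVariants {
  public static void main(String[] args) {
    // Default context for reads/writes with no extra hints.
    IOContext dflt = IOContext.DEFAULT;

    // Flush of a new segment: doc count plus estimated size in bytes.
    IOContext flush = new IOContext(new FlushInfo(1000, 1L << 20));

    // Merge (or addIndexes copy, as above): total doc count, estimated
    // merge bytes, whether the source is external, and the segment count
    // requested by a forced merge (-1 when not forced).
    IOContext merge = new IOContext(new MergeInfo(1000, 1L << 20, true, -1));

    // Read-once copy of an existing context, as used when a terms index
    // is loaded fully into memory (see the VariableGapTermsIndexReader
    // example above).
    IOContext readOnce = new IOContext(merge, true);

    System.out.println(dflt + " " + flush + " " + merge + " " + readOnce);
  }
}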