
Examples of org.apache.lucene.codecs.DocValuesConsumer$BitsFilteredTermsEnum


  @Override
  void flush(SegmentWriteState state) throws IOException {
    if (!writers.isEmpty()) {
      DocValuesFormat fmt = state.segmentInfo.getCodec().docValuesFormat();
      DocValuesConsumer dvConsumer = fmt.fieldsConsumer(state);
      boolean success = false;
      try {
        for(DocValuesWriter writer : writers.values()) {
          writer.finish(state.segmentInfo.getDocCount());
          writer.flush(state, dvConsumer);
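
The snippet above appears to be the indexing-chain flush path: when a segment is flushed, each buffered DocValuesWriter is finished and drained into a DocValuesConsumer obtained from the codec's DocValuesFormat, and the consumer is then closed. For context, here is a minimal sketch of how doc values reach that path from the user-facing API. It assumes a Lucene 4.x release with the analyzers-common module on the classpath; the Version constant, analyzer, directory, and field names are illustrative choices, not taken from the snippet.

import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Version;

public class DocValuesIndexingSketch {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory(); // in-memory directory, for illustration only
    IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_48, new WhitespaceAnalyzer(Version.LUCENE_48));
    IndexWriter writer = new IndexWriter(dir, conf);
    try {
      Document doc = new Document();
      doc.add(new NumericDocValuesField("popularity", 42L));           // hypothetical numeric DV field
      doc.add(new BinaryDocValuesField("payload", new BytesRef("x"))); // hypothetical binary DV field
      writer.addDocument(doc);
      writer.commit(); // flushing the segment drives a flush(SegmentWriteState) like the one above
    } finally {
      writer.close();
    }
  }
}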

        fieldInfos = builder.finish();
        final long nextFieldInfosGen = info.getNextFieldInfosGen();
        final String segmentSuffix = Long.toString(nextFieldInfosGen, Character.MAX_RADIX);
        final SegmentWriteState state = new SegmentWriteState(null, trackingDir, info.info, fieldInfos, writer.getConfig().getTermIndexInterval(), null, IOContext.DEFAULT, segmentSuffix);
        final DocValuesFormat docValuesFormat = codec.docValuesFormat();
        final DocValuesConsumer fieldsConsumer = docValuesFormat.fieldsConsumer(state);
        boolean fieldsConsumerSuccess = false;
        try {
//          System.out.println("[" + Thread.currentThread().getName() + "] RLD.writeFieldUpdates: applying numeric updates; seg=" + info + " updates=" + numericFieldUpdates);
          for (Entry<String,NumericDocValuesFieldUpdates> e : dvUpdates.numericDVUpdates.entrySet()) {
            final String field = e.getKey();
            final NumericDocValuesFieldUpdates fieldUpdates = e.getValue();
            final FieldInfo fieldInfo = fieldInfos.fieldInfo(field);
            assert fieldInfo != null;

            fieldInfo.setDocValuesGen(nextFieldInfosGen);
            // write the numeric updates to a new gen'd docvalues file
            fieldsConsumer.addNumericField(fieldInfo, new Iterable<Number>() {
              final NumericDocValues currentValues = reader.getNumericDocValues(field);
              final Bits docsWithField = reader.getDocsWithField(field);
              final int maxDoc = reader.maxDoc();
              final NumericDocValuesFieldUpdates.Iterator updatesIter = fieldUpdates.iterator();
              @Override
              public Iterator<Number> iterator() {
                updatesIter.reset();
                return new Iterator<Number>() {

                  int curDoc = -1;
                  int updateDoc = updatesIter.nextDoc();
                 
                  @Override
                  public boolean hasNext() {
                    return curDoc < maxDoc - 1;
                  }

                  @Override
                  public Number next() {
                    if (++curDoc >= maxDoc) {
                      throw new NoSuchElementException("no more documents to return values for");
                    }
                    if (curDoc == updateDoc) { // this document has an updated value
                      Long value = updatesIter.value(); // either null (unset value) or updated value
                      updateDoc = updatesIter.nextDoc(); // prepare for next round
                      return value;
                    } else {
                      // no update for this document
                      assert curDoc < updateDoc;
                      if (currentValues != null && docsWithField.get(curDoc)) {
                        // only read the current value if the document had a value before
                        return currentValues.get(curDoc);
                      } else {
                        return null;
                      }
                    }
                  }

                  @Override
                  public void remove() {
                    throw new UnsupportedOperationException("this iterator does not support removing elements");
                  }
                };
              }
            });
          }

//        System.out.println("[" + Thread.currentThread().getName() + "] RAU.writeFieldUpdates: applying binary updates; seg=" + info + " updates=" + dvUpdates.binaryDVUpdates);
        for (Entry<String,BinaryDocValuesFieldUpdates> e : dvUpdates.binaryDVUpdates.entrySet()) {
          final String field = e.getKey();
          final BinaryDocValuesFieldUpdates dvFieldUpdates = e.getValue();
          final FieldInfo fieldInfo = fieldInfos.fieldInfo(field);
          assert fieldInfo != null;

//          System.out.println("[" + Thread.currentThread().getName() + "] RAU.writeFieldUpdates: applying binary updates; seg=" + info + " f=" + dvFieldUpdates + ", updates=" + dvFieldUpdates);

          fieldInfo.setDocValuesGen(nextFieldInfosGen);
          // write the binary updates to a new gen'd docvalues file
          fieldsConsumer.addBinaryField(fieldInfo, new Iterable<BytesRef>() {
            final BinaryDocValues currentValues = reader.getBinaryDocValues(field);
            final Bits docsWithField = reader.getDocsWithField(field);
            final int maxDoc = reader.maxDoc();
            final BinaryDocValuesFieldUpdates.Iterator updatesIter = dvFieldUpdates.iterator();
            @Override
            public Iterator<BytesRef> iterator() {
              updatesIter.reset();
              return new Iterator<BytesRef>() {

                int curDoc = -1;
                int updateDoc = updatesIter.nextDoc();
                BytesRef scratch = new BytesRef();
               
                @Override
                public boolean hasNext() {
                  return curDoc < maxDoc - 1;
                }

                @Override
                public BytesRef next() {
                  if (++curDoc >= maxDoc) {
                    throw new NoSuchElementException("no more documents to return values for");
                  }
                  if (curDoc == updateDoc) { // this document has an updated value
                    BytesRef value = updatesIter.value(); // either null (unset value) or updated value
                    updateDoc = updatesIter.nextDoc(); // prepare for next round
                    return value;
                  } else {
                    // no update for this document
                    assert curDoc < updateDoc;
                    if (currentValues != null && docsWithField.get(curDoc)) {
                      // only read the current value if the document had a value before
                      currentValues.get(curDoc, scratch);
                      return scratch;
                    } else {
                      return null;
                    }
                  }
                }

                @Override
                public void remove() {
                  throw new UnsupportedOperationException("this iterator does not support removing elements");
                }
              };
            }
          });
        }

          codec.fieldInfosFormat().getFieldInfosWriter().write(trackingDir, info.info.name, segmentSuffix, fieldInfos, IOContext.DEFAULT);
          fieldsConsumerSuccess = true;
        } finally {
          if (fieldsConsumerSuccess) {
            fieldsConsumer.close();
          } else {
            IOUtils.closeWhileHandlingException(fieldsConsumer);
          }
        }
      } finally {
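
The field-updates code above (the commented-out debug lines point at the ReadersAndLiveDocs writeFieldUpdates path) illustrates the central contract of addNumericField and addBinaryField: the supplied Iterable yields exactly one value per document, in docID order, with null meaning the document has no value, and it must hand out a fresh Iterator on every iterator() call because a codec may walk the values more than once (for example, once to gather statistics and once to encode), which is why updatesIter.reset() is called inside iterator(). Below is a stripped-down sketch of the same pattern; the class name and the per-document Long[] are hypothetical stand-ins for the merged current-values/updates stream.

import java.io.IOException;
import java.util.Iterator;
import java.util.NoSuchElementException;

import org.apache.lucene.codecs.DocValuesConsumer;
import org.apache.lucene.index.FieldInfo;

final class NumericDocValuesSketch {
  // perDocValues is a hypothetical source with one entry per document; null = no value
  static void writeNumericField(DocValuesConsumer consumer, FieldInfo fieldInfo,
                                final Long[] perDocValues) throws IOException {
    consumer.addNumericField(fieldInfo, new Iterable<Number>() {
      @Override
      public Iterator<Number> iterator() {
        // return a fresh iterator on every call, mirroring updatesIter.reset() above
        return new Iterator<Number>() {
          int doc = 0;

          @Override
          public boolean hasNext() {
            return doc < perDocValues.length;
          }

          @Override
          public Number next() {
            if (!hasNext()) {
              throw new NoSuchElementException();
            }
            return perDocValues[doc++]; // may be null: this document has no value
          }

          @Override
          public void remove() {
            throw new UnsupportedOperationException();
          }
        };
      }
    });
  }
}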

    return mergeState;
  }

  private void mergeDocValues(SegmentWriteState segmentWriteState) throws IOException {
    DocValuesConsumer consumer = codec.docValuesFormat().fieldsConsumer(segmentWriteState);
    boolean success = false;
    try {
      for (FieldInfo field : mergeState.fieldInfos) {
        DocValuesType type = field.getDocValuesType();
        if (type != null) {
          if (type == DocValuesType.NUMERIC) {
            List<NumericDocValues> toMerge = new ArrayList<>();
            List<Bits> docsWithField = new ArrayList<>();
            for (AtomicReader reader : mergeState.readers) {
              NumericDocValues values = reader.getNumericDocValues(field.name);
              Bits bits = reader.getDocsWithField(field.name);
              if (values == null) {
                values = DocValues.EMPTY_NUMERIC;
                bits = new Bits.MatchNoBits(reader.maxDoc());
              }
              toMerge.add(values);
              docsWithField.add(bits);
            }
            consumer.mergeNumericField(field, mergeState, toMerge, docsWithField);
          } else if (type == DocValuesType.BINARY) {
            List<BinaryDocValues> toMerge = new ArrayList<>();
            List<Bits> docsWithField = new ArrayList<>();
            for (AtomicReader reader : mergeState.readers) {
              BinaryDocValues values = reader.getBinaryDocValues(field.name);
              Bits bits = reader.getDocsWithField(field.name);
              if (values == null) {
                values = DocValues.EMPTY_BINARY;
                bits = new Bits.MatchNoBits(reader.maxDoc());
              }
              toMerge.add(values);
              docsWithField.add(bits);
            }
            consumer.mergeBinaryField(field, mergeState, toMerge, docsWithField);
          } else if (type == DocValuesType.SORTED) {
            List<SortedDocValues> toMerge = new ArrayList<>();
            for (AtomicReader reader : mergeState.readers) {
              SortedDocValues values = reader.getSortedDocValues(field.name);
              if (values == null) {
                values = DocValues.EMPTY_SORTED;
              }
              toMerge.add(values);
            }
            consumer.mergeSortedField(field, mergeState, toMerge);
          } else if (type == DocValuesType.SORTED_SET) {
            List<SortedSetDocValues> toMerge = new ArrayList<>();
            for (AtomicReader reader : mergeState.readers) {
              SortedSetDocValues values = reader.getSortedSetDocValues(field.name);
              if (values == null) {
                values = DocValues.EMPTY_SORTED_SET;
              }
              toMerge.add(values);
            }
            consumer.mergeSortedSetField(field, mergeState, toMerge);
          } else {
            throw new AssertionError("type=" + type);
          }
        }
      }
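
In the merge path above (apparently SegmentMerger.mergeDocValues), readers that lack a field are substituted with a DocValues.EMPTY_* instance plus a Bits.MatchNoBits mask so that every reader contributes exactly maxDoc entries. The merge*Field helpers are implemented in DocValuesConsumer itself on top of the abstract add*Field methods, and the sorted and sorted-set variants are where the BitsFilteredTermsEnum named in this page's title is used, to skip terms that are referenced only by deleted documents. As a small sketch of that lower layer, here is how a SORTED field could be written directly; the field, dictionary, and per-document ords are invented, and -1 marks a document with no value.

import java.io.IOException;
import java.util.Arrays;

import org.apache.lucene.codecs.DocValuesConsumer;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.util.BytesRef;

final class SortedDocValuesSketch {
  static void writeSortedField(DocValuesConsumer consumer, FieldInfo fieldInfo) throws IOException {
    // the dictionary of unique values, in BytesRef sort order, one entry per ord
    Iterable<BytesRef> values = Arrays.asList(new BytesRef("apple"), new BytesRef("banana"));
    // one ord per document, addressing the dictionary above; -1 means no value
    Iterable<Number> docToOrd = Arrays.<Number>asList(1, 0, -1, 0);
    consumer.addSortedField(fieldInfo, values, docToOrd);
  }
}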

      }
    }
  }

  private void mergeNorms(SegmentWriteState segmentWriteState) throws IOException {
    DocValuesConsumer consumer = codec.normsFormat().normsConsumer(segmentWriteState);
    boolean success = false;
    try {
      for (FieldInfo field : mergeState.fieldInfos) {
        if (field.hasNorms()) {
          List<NumericDocValues> toMerge = new ArrayList<>();
          List<Bits> docsWithField = new ArrayList<>();
          for (AtomicReader reader : mergeState.readers) {
            NumericDocValues norms = reader.getNormValues(field.name);
            if (norms == null) {
              norms = DocValues.EMPTY_NUMERIC;
            }
            toMerge.add(norms);
            docsWithField.add(new Bits.MatchAllBits(reader.maxDoc()));
          }
          consumer.mergeNumericField(field, mergeState, toMerge, docsWithField);
        }
      }
      success = true;
    } finally {
      if (success) {

public class AssertingNormsFormat extends NormsFormat {
  private final NormsFormat in = new Lucene42NormsFormat();
 
  @Override
  public DocValuesConsumer normsConsumer(SegmentWriteState state) throws IOException {
    DocValuesConsumer consumer = in.normsConsumer(state);
    assert consumer != null;
    return new AssertingNormsConsumer(consumer, state.segmentInfo.getDocCount());
  }

    super("Asserting");
  }

  @Override
  public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
    DocValuesConsumer consumer = in.fieldsConsumer(state);
    assert consumer != null;
    return new AssertingDocValuesConsumer(consumer, state.segmentInfo.getDocCount());
  }
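
Both Asserting formats above use the same delegation pattern: obtain the real consumer from a wrapped format, then return a wrapper that can check every call before forwarding it. Below is a minimal sketch of such a forwarding wrapper against the Lucene 4.x DocValuesConsumer API shown on this page (the abstract method set differs in other Lucene versions); the real AssertingDocValuesConsumer layers invariant checks, such as document counts and ord ranges, on top of a skeleton like this.

import java.io.IOException;

import org.apache.lucene.codecs.DocValuesConsumer;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.util.BytesRef;

class ForwardingDocValuesConsumer extends DocValuesConsumer {
  private final DocValuesConsumer in;

  ForwardingDocValuesConsumer(DocValuesConsumer in) {
    assert in != null;
    this.in = in;
  }

  @Override
  public void addNumericField(FieldInfo field, Iterable<Number> values) throws IOException {
    // a real wrapper would validate the values here before delegating
    in.addNumericField(field, values);
  }

  @Override
  public void addBinaryField(FieldInfo field, Iterable<BytesRef> values) throws IOException {
    in.addBinaryField(field, values);
  }

  @Override
  public void addSortedField(FieldInfo field, Iterable<BytesRef> values, Iterable<Number> docToOrd) throws IOException {
    in.addSortedField(field, values, docToOrd);
  }

  @Override
  public void addSortedSetField(FieldInfo field, Iterable<BytesRef> values, Iterable<Number> docToOrdCount, Iterable<Number> ords) throws IOException {
    in.addSortedSetField(field, values, docToOrdCount, ords);
  }

  @Override
  public void close() throws IOException {
    in.close();
  }
}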

  void abort() {}
 
  @Override
  public void flush(Map<String,InvertedDocEndConsumerPerField> fieldsToFlush, SegmentWriteState state) throws IOException {
    boolean success = false;
    DocValuesConsumer normsConsumer = null;
    try {
      if (state.fieldInfos.hasNorms()) {
        NormsFormat normsFormat = state.segmentInfo.getCodec().normsFormat();
        assert normsFormat != null;
        normsConsumer = normsFormat.normsConsumer(state);
