Package org.apache.lucene.codecs

Examples of org.apache.lucene.codecs.DocValuesConsumer$BitsFilteredTermsEnum
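The snippets below, collected from the Lucene sources, show how a DocValuesConsumer is obtained from a codec's DocValuesFormat (or NormsFormat) and then used: to write numeric doc-values updates, to merge doc values and norms during segment merging, and to flush buffered doc values when a segment is written.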


    super("Asserting");
  }

  @Override
  public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
    DocValuesConsumer consumer = in.fieldsConsumer(state);
    assert consumer != null;
    return new AssertingDocValuesConsumer(consumer, state.segmentInfo.getDocCount());
  }
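
For comparison, here is a minimal sketch of a pass-through DocValuesConsumer that follows the same wrapping pattern as the Asserting format above. The method signatures match the Lucene 4.x API used throughout these snippets; the class and field names are illustrative, the usual imports (java.io.IOException, org.apache.lucene.index.FieldInfo, org.apache.lucene.util.BytesRef) are assumed, and some 4.x releases declare additional abstract methods (e.g. for SORTED_NUMERIC doc values) that would also need forwarding.

  // Illustrative delegating consumer: forwards every call to the wrapped instance.
  class ForwardingDocValuesConsumer extends DocValuesConsumer {
    private final DocValuesConsumer in;

    ForwardingDocValuesConsumer(DocValuesConsumer in) {
      this.in = in;
    }

    @Override
    public void addNumericField(FieldInfo field, Iterable<Number> values) throws IOException {
      in.addNumericField(field, values);
    }

    @Override
    public void addBinaryField(FieldInfo field, Iterable<BytesRef> values) throws IOException {
      in.addBinaryField(field, values);
    }

    @Override
    public void addSortedField(FieldInfo field, Iterable<BytesRef> values, Iterable<Number> docToOrd) throws IOException {
      in.addSortedField(field, values, docToOrd);
    }

    @Override
    public void addSortedSetField(FieldInfo field, Iterable<BytesRef> values,
                                  Iterable<Number> docToOrdCount, Iterable<Number> ords) throws IOException {
      in.addSortedSetField(field, values, docToOrdCount, ords);
    }

    @Override
    public void close() throws IOException {
      in.close();
    }
  }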


        fieldInfos = builder.finish();
        final long nextFieldInfosGen = info.getNextFieldInfosGen();
        final String segmentSuffix = Long.toString(nextFieldInfosGen, Character.MAX_RADIX);
        final SegmentWriteState state = new SegmentWriteState(null, trackingDir, info.info, fieldInfos, writer.getConfig().getTermIndexInterval(), null, IOContext.DEFAULT, segmentSuffix);
        final DocValuesFormat docValuesFormat = codec.docValuesFormat();
        final DocValuesConsumer fieldsConsumer = docValuesFormat.fieldsConsumer(state);
        boolean fieldsConsumerSuccess = false;
        try {
//          System.out.println("[" + Thread.currentThread().getName() + "] RLD.writeLiveDocs: applying updates; seg=" + info + " updates=" + numericUpdates);
          for (Entry<String,NumericFieldUpdates> e : numericFieldUpdates.entrySet()) {
            final String field = e.getKey();
            final NumericFieldUpdates fieldUpdates = e.getValue();
            final FieldInfo fieldInfo = fieldInfos.fieldInfo(field);
            assert fieldInfo != null;

            fieldInfo.setDocValuesGen(nextFieldInfosGen);
            // write the numeric updates to a new gen'd docvalues file
            fieldsConsumer.addNumericField(fieldInfo, new Iterable<Number>() {
              final NumericDocValues currentValues = reader.getNumericDocValues(field);
              final Bits docsWithField = reader.getDocsWithField(field);
              final int maxDoc = reader.maxDoc();
              final UpdatesIterator updatesIter = fieldUpdates.getUpdates();
              @Override
              public Iterator<Number> iterator() {
                updatesIter.reset();
                return new Iterator<Number>() {

                  int curDoc = -1;
                  int updateDoc = updatesIter.nextDoc();
                 
                  @Override
                  public boolean hasNext() {
                    return curDoc < maxDoc - 1;
                  }

                  @Override
                  public Number next() {
                    if (++curDoc >= maxDoc) {
                      throw new NoSuchElementException("no more documents to return values for");
                    }
                    if (curDoc == updateDoc) { // this document has an updated value
                      Long value = updatesIter.value(); // either null (unset value) or updated value
                      updateDoc = updatesIter.nextDoc(); // prepare for next round
                      return value;
                    } else {
                      // no update for this document
                      assert curDoc < updateDoc;
                      if (currentValues != null && docsWithField.get(curDoc)) {
                        // only read the current value if the document had a value before
                        return currentValues.get(curDoc);
                      } else {
                        return null;
                      }
                    }
                  }

                  @Override
                  public void remove() {
                    throw new UnsupportedOperationException("this iterator does not support removing elements");
                  }
                };
              }
            });
          }
         
          codec.fieldInfosFormat().getFieldInfosWriter().write(trackingDir, info.info.name, segmentSuffix, fieldInfos, IOContext.DEFAULT);
          fieldsConsumerSuccess = true;
        } finally {
          if (fieldsConsumerSuccess) {
            fieldsConsumer.close();
          } else {
            IOUtils.closeWhileHandlingException(fieldsConsumer);
          }
        }
      } finally {
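addNumericField consumes an Iterable<Number> that must yield exactly one element per document, with null meaning the document has no value. A hypothetical standalone adapter built on the same pattern as the anonymous iterator above (fromMap, docToValue and maxDoc are illustrative names, not Lucene API; java.util.Map, Iterator and NoSuchElementException are assumed imported):

  static Iterable<Number> fromMap(final Map<Integer,Long> docToValue, final int maxDoc) {
    return new Iterable<Number>() {
      @Override
      public Iterator<Number> iterator() {
        return new Iterator<Number>() {
          int doc = -1;

          @Override
          public boolean hasNext() {
            return doc < maxDoc - 1;
          }

          @Override
          public Number next() {
            if (++doc >= maxDoc) {
              throw new NoSuchElementException();
            }
            return docToValue.get(doc); // null when the document has no value
          }

          @Override
          public void remove() {
            throw new UnsupportedOperationException();
          }
        };
      }
    };
  }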

    return mergeState;
  }

  private void mergeDocValues(SegmentWriteState segmentWriteState) throws IOException {
    DocValuesConsumer consumer = codec.docValuesFormat().fieldsConsumer(segmentWriteState);
    boolean success = false;
    try {
      for (FieldInfo field : mergeState.fieldInfos) {
        DocValuesType type = field.getDocValuesType();
        if (type != null) {
          if (type == DocValuesType.NUMERIC) {
            List<NumericDocValues> toMerge = new ArrayList<NumericDocValues>();
            List<Bits> docsWithField = new ArrayList<Bits>();
            for (AtomicReader reader : mergeState.readers) {
              NumericDocValues values = reader.getNumericDocValues(field.name);
              Bits bits = reader.getDocsWithField(field.name);
              if (values == null) {
                values = NumericDocValues.EMPTY;
                bits = new Bits.MatchNoBits(reader.maxDoc());
              }
              toMerge.add(values);
              docsWithField.add(bits);
            }
            consumer.mergeNumericField(field, mergeState, toMerge, docsWithField);
          } else if (type == DocValuesType.BINARY) {
            List<BinaryDocValues> toMerge = new ArrayList<BinaryDocValues>();
            List<Bits> docsWithField = new ArrayList<Bits>();
            for (AtomicReader reader : mergeState.readers) {
              BinaryDocValues values = reader.getBinaryDocValues(field.name);
              Bits bits = reader.getDocsWithField(field.name);
              if (values == null) {
                values = BinaryDocValues.EMPTY;
                bits = new Bits.MatchNoBits(reader.maxDoc());
              }
              toMerge.add(values);
              docsWithField.add(bits);
            }
            consumer.mergeBinaryField(field, mergeState, toMerge, docsWithField);
          } else if (type == DocValuesType.SORTED) {
            List<SortedDocValues> toMerge = new ArrayList<SortedDocValues>();
            for (AtomicReader reader : mergeState.readers) {
              SortedDocValues values = reader.getSortedDocValues(field.name);
              if (values == null) {
                values = SortedDocValues.EMPTY;
              }
              toMerge.add(values);
            }
            consumer.mergeSortedField(field, mergeState, toMerge);
          } else if (type == DocValuesType.SORTED_SET) {
            List<SortedSetDocValues> toMerge = new ArrayList<SortedSetDocValues>();
            for (AtomicReader reader : mergeState.readers) {
              SortedSetDocValues values = reader.getSortedSetDocValues(field.name);
              if (values == null) {
                values = SortedSetDocValues.EMPTY;
              }
              toMerge.add(values);
            }
            consumer.mergeSortedSetField(field, mergeState, toMerge);
          } else {
            throw new AssertionError("type=" + type);
          }
        }
      }
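The snippet above is cut off before the consumer is closed. Based on the mergeNorms snippet below and the close-on-error handling in the first snippet, the method presumably ends with something like the following sketch (not verbatim source):

      success = true;
    } finally {
      if (success) {
        IOUtils.close(consumer);                       // normal path: close and propagate any exception
      } else {
        IOUtils.closeWhileHandlingException(consumer); // error path: close without masking the original exception
      }
    }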

      }
    }
  }

  private void mergeNorms(SegmentWriteState segmentWriteState) throws IOException {
    DocValuesConsumer consumer = codec.normsFormat().normsConsumer(segmentWriteState);
    boolean success = false;
    try {
      for (FieldInfo field : mergeState.fieldInfos) {
        if (field.hasNorms()) {
          List<NumericDocValues> toMerge = new ArrayList<NumericDocValues>();
          List<Bits> docsWithField = new ArrayList<Bits>();
          for (AtomicReader reader : mergeState.readers) {
            NumericDocValues norms = reader.getNormValues(field.name);
            if (norms == null) {
              norms = NumericDocValues.EMPTY;
            }
            toMerge.add(norms);
            docsWithField.add(new Bits.MatchAllBits(reader.maxDoc()));
          }
          consumer.mergeNumericField(field, mergeState, toMerge, docsWithField);
        }
      }
      success = true;
    } finally {
      if (success) {

  @Override
  void flush(SegmentWriteState state) throws IOException {
    if (!writers.isEmpty()) {
      DocValuesFormat fmt = state.segmentInfo.getCodec().docValuesFormat();
      DocValuesConsumer dvConsumer = fmt.fieldsConsumer(state);
      boolean success = false;
      try {
        for(DocValuesWriter writer : writers.values()) {
          writer.finish(state.segmentInfo.getDocCount());
          writer.flush(state, dvConsumer);
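Putting the pieces together, a condensed sketch of the write path these snippets illustrate. Error handling is simplified, state and fieldInfo are assumed to be in scope, java.util.Arrays and org.apache.lucene.util.IOUtils are assumed imported, and the three-element list is purely illustrative (the Iterable must contain one value per document, null meaning no value):

  DocValuesConsumer dvConsumer = state.segmentInfo.getCodec().docValuesFormat().fieldsConsumer(state);
  boolean success = false;
  try {
    dvConsumer.addNumericField(fieldInfo, Arrays.<Number>asList(1L, 2L, null));
    success = true;
  } finally {
    if (success) {
      IOUtils.close(dvConsumer);
    } else {
      IOUtils.closeWhileHandlingException(dvConsumer);
    }
  }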

    super("Asserting");
  }

  @Override
  public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
    DocValuesConsumer consumer = in.fieldsConsumer(state);
    assert consumer != null;
    return new AssertingDocValuesConsumer(consumer, state.segmentInfo.getDocCount());
  }
View Full Code Here

    return mergeState;
  }

  private void mergeDocValues(SegmentWriteState segmentWriteState) throws IOException {
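    // Note: compared with the mergeDocValues snippet earlier on this page, this variant
    // appears to come from an older Lucene release, before mergeNumericField and
    // mergeBinaryField took a docsWithField argument.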
    DocValuesConsumer consumer = codec.docValuesFormat().fieldsConsumer(segmentWriteState);
    boolean success = false;
    try {
      for (FieldInfo field : mergeState.fieldInfos) {
        DocValuesType type = field.getDocValuesType();
        if (type != null) {
          if (type == DocValuesType.NUMERIC) {
            List<NumericDocValues> toMerge = new ArrayList<NumericDocValues>();
            for (AtomicReader reader : mergeState.readers) {
              NumericDocValues values = reader.getNumericDocValues(field.name);
              if (values == null) {
                values = NumericDocValues.EMPTY;
              }
              toMerge.add(values);
            }
            consumer.mergeNumericField(field, mergeState, toMerge);
          } else if (type == DocValuesType.BINARY) {
            List<BinaryDocValues> toMerge = new ArrayList<BinaryDocValues>();
            for (AtomicReader reader : mergeState.readers) {
              BinaryDocValues values = reader.getBinaryDocValues(field.name);
              if (values == null) {
                values = BinaryDocValues.EMPTY;
              }
              toMerge.add(values);
            }
            consumer.mergeBinaryField(field, mergeState, toMerge);
          } else if (type == DocValuesType.SORTED) {
            List<SortedDocValues> toMerge = new ArrayList<SortedDocValues>();
            for (AtomicReader reader : mergeState.readers) {
              SortedDocValues values = reader.getSortedDocValues(field.name);
              if (values == null) {
                values = SortedDocValues.EMPTY;
              }
              toMerge.add(values);
            }
            consumer.mergeSortedField(field, mergeState, toMerge);
          } else if (type == DocValuesType.SORTED_SET) {
            List<SortedSetDocValues> toMerge = new ArrayList<SortedSetDocValues>();
            for (AtomicReader reader : mergeState.readers) {
              SortedSetDocValues values = reader.getSortedSetDocValues(field.name);
              if (values == null) {
                values = SortedSetDocValues.EMPTY;
              }
              toMerge.add(values);
            }
            consumer.mergeSortedSetField(field, mergeState, toMerge);
          } else {
            throw new AssertionError("type=" + type);
          }
        }
      }

      }
    }
  }

  private void mergeNorms(SegmentWriteState segmentWriteState) throws IOException {
    DocValuesConsumer consumer = codec.normsFormat().normsConsumer(segmentWriteState);
    boolean success = false;
    try {
      for (FieldInfo field : mergeState.fieldInfos) {
        if (field.hasNorms()) {
          List<NumericDocValues> toMerge = new ArrayList<NumericDocValues>();
          for (AtomicReader reader : mergeState.readers) {
            NumericDocValues norms = reader.getNormValues(field.name);
            if (norms == null) {
              norms = NumericDocValues.EMPTY;
            }
            toMerge.add(norms);
          }
          consumer.mergeNumericField(field, mergeState, toMerge);
        }
      }
      success = true;
    } finally {
      if (success) {

  }

  /** Writes all buffered doc values (called from {@link #flush}). */
  private void writeDocValues(SegmentWriteState state) throws IOException {
    int docCount = state.segmentInfo.getDocCount();
    DocValuesConsumer dvConsumer = null;
    boolean success = false;
    try {
      for (int i=0;i<fieldHash.length;i++) {
        PerField perField = fieldHash[i];
        while (perField != null) {

    }
  }

  private void writeNorms(SegmentWriteState state) throws IOException {
    boolean success = false;
    DocValuesConsumer normsConsumer = null;
    try {
      if (state.fieldInfos.hasNorms()) {
        NormsFormat normsFormat = state.segmentInfo.getCodec().normsFormat();
        assert normsFormat != null;
        normsConsumer = normsFormat.normsConsumer(state);
