Package it.unimi.dsi.fastutil.ints

Examples of it.unimi.dsi.fastutil.ints.IntArrayList
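IntArrayList is fastutil's resizable array of primitive ints: it implements java.util.List<Integer>, but its primitive methods (add(int), getInt(int), toIntArray()) avoid Integer boxing. A minimal, self-contained sketch before the real-world snippets below:

import it.unimi.dsi.fastutil.ints.IntArrayList;

public class IntArrayListExample {
  public static void main(String[] args) {
    IntArrayList list = new IntArrayList();
    list.add(42);               // stores the primitive directly, no boxing
    list.add(7);
    int first = list.getInt(0); // primitive getter, no unboxing
    System.out.println(first + ", size=" + list.size()); // 42, size=2
  }
}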


   * @param dataCache the facet data cache whose value array is used for the lookup
   * @param vals the facet values to convert; may be null or empty
   * @return the array of order indices of the values; values missing from the cache are skipped
   */
  public static <T> int[] convert(FacetDataCache<T> dataCache, T[] vals) {
    if (vals == null) return new int[0]; // nothing to convert
    if (vals instanceof String[]) return convertString(dataCache, (String[]) vals); // dedicated String path
    IntList list = new IntArrayList(vals.length);
    for (int i = 0; i < vals.length; ++i) {
      int index = dataCache.valArray.indexOfWithType(vals[i]);
      if (index >= 0) { // skip values not present in the cache
        list.add(index);
      }
    }
    return list.toIntArray();
  }
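The method above shows this listing's recurring pattern: collect matches into an IntArrayList behind the IntList interface, then copy them out as a compact int[] with toIntArray(). A standalone sketch of just that pattern (the filter condition is illustrative):

import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntList;
import java.util.Arrays;

public class CollectExample {
  public static void main(String[] args) {
    int[] lookups = {5, -1, 7, -3, 9}; // -1 plays the role of "not found"
    IntList list = new IntArrayList(lookups.length);
    for (int index : lookups) {
      if (index >= 0) {  // skip misses, as convert() does
        list.add(index); // primitive add, no boxing
      }
    }
    int[] result = list.toIntArray(); // compact primitive copy
    System.out.println(Arrays.toString(result)); // [5, 7, 9]
  }
}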


      public RandomAccessDocIdSet getRandomAccessDocIdSet(BoboIndexReader reader)
          throws IOException {
        ZoieSegmentReader<?> zoieReader = (ZoieSegmentReader<?>) (reader.getInnerReader());
        DocIDMapper<?> docidMapper = zoieReader.getDocIDMaper();

        final IntArrayList docidList = new IntArrayList(valSet.size());

        // Map each UID in the value set to a segment-local docid, skipping UIDs not in this segment
        LongIterator iter = valSet.iterator();
        while (iter.hasNext()) {
          int docid = docidMapper.getDocID(iter.nextLong());
          if (docid != DocIDMapper.NOT_FOUND) {
            docidList.add(docid);
          }
        }

        if (docidList.size() == 0) return EmptyDocIdSet.getInstance();

        int[] delDocIds = zoieReader.getDelDocIds();
        if (docidList.size() == 1) {
          // Single candidate: return it unless it has been deleted
          int docId = docidList.getInt(0);
          if (delDocIds == null || delDocIds.length == 0 || Arrays.binarySearch(delDocIds, docId) < 0) {
            return new SingleDocRandmAccessDocIdSet(docId);
          } else {
            return EmptyDocIdSet.getInstance();
          }
        }

        Collections.sort(docidList);
        final IntArrayDocIdSet intArraySet = new IntArrayDocIdSet(docidList.size());
        boolean deletesPresent = delDocIds != null && delDocIds.length > 0;
        for (int docid : docidList) {
          if (!deletesPresent || Arrays.binarySearch(delDocIds, docid) < 0) {
            intArraySet.addDoc(docid); // keep only docs that survived deletes
          }
        }
        return new RandomAccessDocIdSet() {
          @Override
          public boolean get(int docid) {
            return docidList.contains(docid); // linear scan over the captured (sorted) list
          }

          @Override
          public DocIdSetIterator iterator() throws IOException {
            return intArraySet.iterator();

    }

  }
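A side note on the membership test in the anonymous RandomAccessDocIdSet above: IntArrayList.contains(int) is a linear scan. Since docidList is sorted before the class captures it, a binary search over the backing array answers the same question in O(log n). A minimal sketch, independent of the Bobo/Zoie classes above:

import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntArrays;

public class SortedMembership {
  public static void main(String[] args) {
    IntArrayList docids = new IntArrayList(new int[] {3, 9, 17, 42}); // already sorted
    // IntArrays.binarySearch works on the primitive backing array;
    // restrict the search to the list's logical size.
    boolean found = IntArrays.binarySearch(docids.elements(), 0, docids.size(), 17) >= 0;
    System.out.println(found); // true
  }
}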

  public static int[] convertIndexes(FacetDataCache<?> dataCache, String[] vals) {
    IntList list = new IntArrayList();
    for (String val : vals) {
      int[] range = parse(dataCache, val); // inclusive [start, end] value-id range, or null if unparsable
      if (range != null) {
        for (int i = range[0]; i <= range[1]; ++i) { // expand the inclusive range
          list.add(i);
        }
      }
    }
    return list.toIntArray();
  }

        ImmutableList.Builder<PageAndPositions> builder = ImmutableList.builder();
        long nextDistinctId = 0;
        GroupByHash groupByHash = new GroupByHash(types, allChannels, 10_000);
        for (UpdateRequest request : requests) {
            IntList positions = new IntArrayList();
            Block[] blocks = request.getBlocks();

            // Move through the positions of all blocks in lockstep
            int positionCount = blocks[0].getPositionCount();
            for (int position = 0; position < positionCount; position++) {
                // Filter out rows containing nulls up front, since nulls cannot join
                if (!containsNullValue(position, blocks) && groupByHash.putIfAbsent(position, blocks) == nextDistinctId) {
                    nextDistinctId++;

                    // Only include the key if it is not already in the index
                    if (existingSnapshot.getJoinPosition(position, blocks) == UNLOADED_INDEX_KEY) {
                        positions.add(position);
                    }
                }
            }

            if (!positions.isEmpty()) {
                builder.add(new PageAndPositions(request, positions));
            }
        }

        pageAndPositions = builder.build();

            checkNotNull(type, "type is null");

            this.type = type;
            this.slice = Slices.allocate(Ints.checkedCast(DEFAULT_MAX_BLOCK_SIZE.toBytes()));
            this.sliceOutput = slice.getOutput();
            this.positionOffsets = new IntArrayList(1024);
        }
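The 1024 passed to the IntArrayList constructor above is an initial capacity, not a size: the list starts empty, but its backing array is pre-allocated, so early add() calls avoid grow-and-copy cycles. A quick sketch:

import it.unimi.dsi.fastutil.ints.IntArrayList;

public class CapacityExample {
  public static void main(String[] args) {
    IntArrayList offsets = new IntArrayList(1024);
    System.out.println(offsets.size()); // 0 -- capacity is not size
    offsets.add(128);                   // no reallocation until the capacity is exhausted
    System.out.println(offsets.size()); // 1
  }
}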

        ImmutableList.Builder<PageAndPositions> builder = ImmutableList.builder();
        long nextDistinctId = 0;
        GroupByHash groupByHash = new GroupByHash(types, allChannels, 10_000);
        for (UpdateRequest request : requests) {
            IntList positions = new IntArrayList();

            BlockCursor[] cursors = request.duplicateCursors();

            // Move through the positions while advancing the cursors in lockstep
            int positionCount = cursors[0].getRemainingPositions() + 1;
            for (int index = 0; index < positionCount; index++) {
                // cursors start at a valid position, so we don't need to advance the first time
                if (index > 0) {
                    for (BlockCursor cursor : cursors) {
                        checkState(cursor.advanceNextPosition());
                    }
                }

                // We are reading ahead in the cursors, so we need to filter out nulls since they cannot join
                if (!containsNullValue(cursors) && groupByHash.putIfAbsent(cursors) == nextDistinctId) {
                    nextDistinctId++;

                    // Only include the key if it is not already in the index
                    if (existingSnapshot.getJoinPosition(cursors) == UNLOADED_INDEX_KEY) {
                        positions.add(cursors[0].getPosition());
                    }
                }
            }

            if (!positions.isEmpty()) {
                builder.add(new PageAndPositions(request, positions));
            }
        }

        pageAndPositions = builder.build();

    public ChannelHash(ChannelIndex channelIndex, OperatorContext operatorContext)
    {
        hashStrategy = new SliceHashStrategy(channelIndex.getTupleInfo(), channelIndex.getSlices().elements());
        addressToPositionMap = new AddressToPositionMap(channelIndex.getPositionCount(), hashStrategy);
        addressToPositionMap.defaultReturnValue(-1);
        // one "link" slot per value address, initialized to -1; elements() exposes the backing array
        positionLinks = new IntArrayList(new int[channelIndex.getValueAddresses().size()]);
        Arrays.fill(positionLinks.elements(), -1);
        for (int position = 0; position < channelIndex.getValueAddresses().size(); position++) {
            operatorContext.setMemoryReservation(getEstimatedSize());
            long sliceAddress = channelIndex.getValueAddresses().elements()[position];
            int oldPosition = addressToPositionMap.put(sliceAddress, position);
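Two fastutil-specific moves appear in this constructor: the list is created at a fixed size from an int[] (the array constructor copies its argument), and the backing array is then mutated in place through elements(). When no copy is wanted, IntArrayList.wrap(int[]) adopts the array directly. A minimal sketch (the names are illustrative):

import it.unimi.dsi.fastutil.ints.IntArrayList;
import java.util.Arrays;

public class WrapExample {
  public static void main(String[] args) {
    int[] links = new int[8];
    IntArrayList positionLinks = IntArrayList.wrap(links); // adopts the array, no copy
    Arrays.fill(positionLinks.elements(), -1);             // elements() exposes the backing store
    positionLinks.set(3, 10);                              // writes through to links[3]
    System.out.println(links[3]);                          // 10
  }
}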

    BigIntArray order = new BigIntArray(maxDoc);

    TermValueList<?> mterms = _termListFactory == null ? new TermStringList() : _termListFactory
        .createTermList();

    IntArrayList minIDList = new IntArrayList();
    IntArrayList maxIDList = new IntArrayList();
    IntArrayList freqList = new IntArrayList();

    int t = 0; // current term number
    mterms.add(null);
    minIDList.add(-1);
    maxIDList.add(-1);
    freqList.add(0);
    t++;
    Terms terms = reader.terms(_indexFieldName);
    if (terms != null) {
      TermsEnum termsEnum = terms.iterator(null);
      BytesRef text;
      while ((text = termsEnum.next()) != null) {
        // store term text
        // we expect that there is at most one term per document
        if (t > MAX_VAL_COUNT) {
          throw new IOException("maximum number of values cannot exceed: " + MAX_VAL_COUNT);
        }
        String val = text.utf8ToString();
        mterms.add(val);
        int bit = (0x00000001 << (t - 1)); // flag bit for term t (1-based)
        Term term = new Term(_indexFieldName, val);
        DocsEnum docsEnum = reader.termDocsEnum(term);
        // freqList.add(termEnum.docFreq()); // not used: docFreq() does not account for deleted docs
        int df = 0;
        int minID = -1;
        int maxID = -1;
        int docID = -1;
        while ((docID = docsEnum.nextDoc()) != DocsEnum.NO_MORE_DOCS) {
          df++;
          order.add(docID, order.get(docID) | bit);
          minID = docID;
          while (docsEnum.nextDoc() != DocsEnum.NO_MORE_DOCS) {
            docID = docsEnum.docID();
            df++;
            order.add(docID, order.get(docID) | bit);
          }
          maxID = docID;
        }
        freqList.add(df);
        minIDList.add(minID);
        maxIDList.add(maxID);
        t++;
      }
    }

    mterms.seal();

    return new FacetDataCache(order, mterms, freqList.toIntArray(), minIDList.toIntArray(),
        maxIDList.toIntArray(), TermCountSize.large);
  }
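The cache built above packs term membership into one int per document: term t (1-based) contributes bit t - 1, which is why t is capped by MAX_VAL_COUNT (an int holds at most 32 flags). A standalone sketch of the encoding and its decoding:

public class BitMaskExample {
  public static void main(String[] args) {
    int mask = 0;
    mask |= 1 << (3 - 1); // the document contains term #3
    mask |= 1 << (7 - 1); // ... and term #7

    for (int t = 1; t <= 32; t++) {
      if ((mask & (1 << (t - 1))) != 0) {
        System.out.println("term " + t); // prints: term 3, term 7
      }
    }
  }
}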

    BufferedLoader loader = getBufferedLoader(maxdoc, workArea);

    @SuppressWarnings("unchecked")
    TermValueList<T> list = (listFactory == null ? (TermValueList<T>) new TermStringList()
        : listFactory.createTermList());
    IntArrayList minIDList = new IntArrayList();
    IntArrayList maxIDList = new IntArrayList();
    IntArrayList freqList = new IntArrayList();
    OpenBitSet bitset = new OpenBitSet(maxdoc + 1);
    int negativeValueCount = getNegativeValueCount(reader, field);
    int t = 0; // current term number
    list.add(null);
    minIDList.add(-1);
    maxIDList.add(-1);
    freqList.add(0);
    t++;

    _overflow = false;

    Terms terms = reader.terms(field);
    if (terms != null) {
      TermsEnum termsEnum = terms.iterator(null);
      BytesRef text;
      while ((text = termsEnum.next()) != null) {
        String strText = text.utf8ToString();
        list.add(strText);

        Term term = new Term(field, strText);
        DocsEnum docsEnum = reader.termDocsEnum(term);
        // freqList.add(tenum.docFreq()); // not used: docFreq() does not account for deleted docs
        int df = 0;
        int minID = -1;
        int maxID = -1;
        int docID = -1;
        int valId = (t - 1 < negativeValueCount) ? (negativeValueCount - t + 1) : t; // value id for this term, accounting for negative values
        while ((docID = docsEnum.nextDoc()) != DocsEnum.NO_MORE_DOCS) {
          df++;
          if (!loader.add(docID, valId)) logOverflow(fieldName);
          minID = docID;
          bitset.fastSet(docID);
          while (docsEnum.nextDoc() != DocsEnum.NO_MORE_DOCS) {
            docID = docsEnum.docID();
            df++;
            if (!loader.add(docID, valId)) logOverflow(fieldName);
            bitset.fastSet(docID);
          }
          maxID = docID;
        }
        freqList.add(df);
        minIDList.add(minID);
        maxIDList.add(maxID);
        t++;
      }
    }

    list.seal();

    try {
      _nestedArray.load(maxdoc + 1, loader);
    } catch (IOException e) {
      throw e; // propagate I/O errors unchanged
    } catch (Exception e) {
      throw new RuntimeException("failed to load due to " + e.toString(), e); // wrap anything else
    }

    this.valArray = list;
    this.freqs = freqList.toIntArray();
    this.minIDs = minIDList.toIntArray();
    this.maxIDs = maxIDList.toIntArray();

    int doc = 0;
    while (doc <= maxdoc && !_nestedArray.contains(doc, 0, true)) {

    }

    @SuppressWarnings("unchecked")
    TermValueList<T> list = (listFactory == null ? (TermValueList<T>) new TermStringList()
        : listFactory.createTermList());
    IntArrayList minIDList = new IntArrayList();
    IntArrayList maxIDList = new IntArrayList();
    IntArrayList freqList = new IntArrayList();
    OpenBitSet bitset = new OpenBitSet(maxdoc + 1);

    int t = 0; // current term number
    list.add(null);
    minIDList.add(-1);
    maxIDList.add(-1);
    freqList.add(0);
    t++;

    _overflow = false;

    Terms terms = reader.terms(field);
    if (terms != null) {
      TermsEnum termsEnum = terms.iterator(null);
      BytesRef text;
      while ((text = termsEnum.next()) != null) {
        String strText = text.utf8ToString();
        list.add(strText);

        Term term = new Term(field, strText);
        DocsEnum docsEnum = reader.termDocsEnum(term);

        int df = 0;
        int minID = -1;
        int maxID = -1;
        int docID = -1;
        // value id for this term, accounting for negative values; applied to every posting below
        int valId = (t - 1 < negativeValueCount) ? (negativeValueCount - t + 1) : t;
        while ((docID = docsEnum.nextDoc()) != DocsEnum.NO_MORE_DOCS) {
          df++;
          if (!_nestedArray.addData(docID, valId)) logOverflow(fieldName);
          minID = docID;
          bitset.fastSet(docID);
          while (docsEnum.nextDoc() != DocsEnum.NO_MORE_DOCS) {
            docID = docsEnum.docID();
            df++;
            if (!_nestedArray.addData(docID, valId)) logOverflow(fieldName);
            bitset.fastSet(docID);
          }
          maxID = docID;
        }
        freqList.add(df);
        minIDList.add(minID);
        maxIDList.add(maxID);
        t++;
      }
    }

    list.seal();

    this.valArray = list;
    this.freqs = freqList.toIntArray();
    this.minIDs = minIDList.toIntArray();
    this.maxIDs = maxIDList.toIntArray();

    int doc = 0;
    while (doc <= maxdoc && !_nestedArray.contains(doc, 0, true)) {
