Package com.browseengine.bobo.util

Examples of com.browseengine.bobo.util.BigIntArray
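BigIntArray is the int-valued member of Bobo's BigSegmentedArray family (the excerpts below assign it to BigSegmentedArray references), and it is used throughout the facet code as a large per-document table of term ordinals: values are written with add(index, value) and read with get(index), much like a plain int[].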


import com.browseengine.bobo.util.BigIntArray;
import junit.framework.TestCase;

public class BigIntArrayTest extends TestCase {
  public static void testBigIntArray() {
    int count = 5000000;
    BigIntArray test = new BigIntArray(count);
    int[] test2 = new int[count];
    for (int i = 0; i < count; i++) {
      test.add(i, i);
      test2[i] = i;
    }

    for (int i = 0; i < count; i++) {
      assertEquals(test2[i], test.get(i));  // every slot should read back the value written above
    }

    long start = System.currentTimeMillis();
    for (int i = 0; i < count; i++) {
      test.get(i);
    }
    long end = System.currentTimeMillis();
    System.out.println("Big array took: " + (end - start));

    start = System.currentTimeMillis();
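The excerpt above stops just before the baseline measurement; a minimal sketch of how the comparison it sets up could finish, reusing the start and end variables and the same System.currentTimeMillis() timing (this continuation is hypothetical, not the original test code):

    // Hypothetical continuation: time the same sequential reads against the plain int[].
    start = System.currentTimeMillis();
    int sum = 0;
    for (int i = 0; i < count; i++) {
      sum += test2[i];            // read back the plain int[] filled earlier
    }
    end = System.currentTimeMillis();
    System.out.println("int[] took: " + (end - start) + " (checksum " + sum + ")");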


    for (int i = 0; i < numVals; ++i) {
      cache.valArray.add(formatter.format(i + 1));
    }
    cache.valArray.seal();
    cache.orderArray = new BigIntArray(numDocsPerSeg);
    return cache;
  }
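The orderArray allocated here is the per-document ordinal table; elsewhere in these excerpts it is filled by mapping each document id to the position of that document's value in the sealed valArray. A minimal sketch of that mapping, with hypothetical docId and ordinal values:

    // Hypothetical illustration, not part of the original helper:
    int docId = 42;       // assumed document id, must be smaller than numDocsPerSeg
    int ordinal = 3;      // assumed index into the sealed valArray
    cache.orderArray.add(docId, ordinal);
    int readBack = cache.orderArray.get(docId);   // reads back 3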

    }
  }

  static void time1(final int[][] array) throws InterruptedException {
    int iter = array.length;
    final BigIntArray bigArray = new BigIntArray(max);  // 'max' (the shared array capacity) is defined elsewhere in the enclosing class
    Thread[] threads = new Thread[iter];

    for (int i = 0; i < iter; ++i) {
      threads[i] = new RunnerThread(array[i], bigArray);
    }
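The excerpt ends before the threads are started; a minimal sketch of how such a timing harness typically finishes (this completion is hypothetical, as is the assumption that each RunnerThread simply writes its int[] slice into the shared BigIntArray):

    // Hypothetical completion of time1: start the workers, wait for them, report wall-clock time.
    long start = System.currentTimeMillis();
    for (Thread thread : threads) {
      thread.start();
    }
    for (Thread thread : threads) {
      thread.join();        // time1 already declares throws InterruptedException
    }
    System.out.println("concurrent BigIntArray writes took: " + (System.currentTimeMillis() - start) + " ms");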

  public void testDefaultFacetIterator() {
    TermStringList tsl1 = new TermStringList();
    tsl1.add("i");
    tsl1.add("m");
    tsl1.seal();
    BigIntArray count = new BigIntArray(2);
    count.add(0, 1);
    count.add(1, 2);
    DefaultFacetIterator itr1 = new DefaultFacetIterator(tsl1, count, 2, false);
    TermStringList tsl2 = new TermStringList();
    tsl2.add("i");
    tsl2.add("m");
    tsl2.seal();
    BigIntArray count2 = new BigIntArray(2);
    count2.add(0, 1);
    count2.add(1, 5);
    DefaultFacetIterator itr2 = new DefaultFacetIterator(tsl2, count2, 2, true);
    List<FacetIterator> list = new ArrayList<FacetIterator>();
    list.add(itr1);
    list.add(itr2);
    CombinedFacetIterator ctr = new CombinedFacetIterator(list);
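A minimal sketch of consuming the combined iterator built above, assuming FacetIterator follows the java.util.Iterator contract and exposes the merged count for the current value through a public count field (both are assumptions about this version of Bobo):

    // Hypothetical usage; the count field and the summing behaviour are assumptions.
    // If equal values are merged, "i" would come back with count 1 + 1 = 2 and "m" with 2 + 5 = 7.
    while (ctr.hasNext()) {
      Object value = ctr.next();
      System.out.println(value + " -> " + ctr.count);
    }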

    DecimalFormat df = new DecimalFormat(format);
    List<IntFacetIterator> list = new ArrayList<IntFacetIterator>();
    for (int seg = 0; seg < 5; seg++) {
      TermIntList tsl1 = new TermIntList(format);
      int limit = 25;
      BigIntArray count = new BigIntArray(limit);
      String[] terms = new String[limit];
      for (int i = limit - 1; i >= 0; i--) {
        terms[i] = df.format(i);
      }
      Arrays.sort(terms);
      for (int i = 0; i < limit; i++) {
        tsl1.add(terms[i]);
        count.add(i, i);
      }
      tsl1.seal();
      DefaultIntFacetIterator itr1 = new DefaultIntFacetIterator(tsl1, count, limit, true);
      list.add(itr1);
    }

    // pick the narrowest array type that can hold every ordinal in the term dictionary
    if (termCount < Byte.MAX_VALUE) {
      return new BigByteArray(maxDoc);
    } else if (termCount < Short.MAX_VALUE) {
      return new BigShortArray(maxDoc);
    } else return new BigIntArray(maxDoc);
  }
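This excerpt picks the narrowest per-document array that can still hold every term ordinal: dictionaries that fit in a signed byte get a BigByteArray, ones that fit in a short get a BigShortArray, and only larger dictionaries pay for a full BigIntArray. A minimal sketch of calling such a factory, where newOrderArray is an assumed name and (termCount, maxDoc) an assumed signature for the method shown above, and BigSegmentedArray is presumed to be the common base type:

    // Hypothetical caller; the name, signature and return type are assumptions, see above.
    BigSegmentedArray small = newOrderArray(100, maxDoc);    // 100 < Byte.MAX_VALUE, so a BigByteArray
    BigSegmentedArray medium = newOrderArray(1000, maxDoc);  // 1000 < Short.MAX_VALUE, so a BigShortArray
    BigSegmentedArray large = newOrderArray(50000, maxDoc);  // 50000 > Short.MAX_VALUE, so a BigIntArray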


    _facetDataFetcher.cleanup(reader);

    int maxDoc = reader.maxDoc();
    int size = dataMap == null ? 1:(dataMap.size() + 1);

    BigSegmentedArray order = new BigIntArray(maxDoc);
    TermValueList list = _termListFactory == null ?
      new TermStringList(size) :
      _termListFactory.createTermList(size);

    int[] freqs = new int[size];
    int[] minIDs = new int[size];
    int[] maxIDs = new int[size];

    list.add(null);
    freqs[0] = nullFreq;
    minIDs[0] = nullMinId;
    maxIDs[0] = nullMaxId;

    if (dataMap != null)
    {
      int i = 1;
      Integer docId;
      LinkedList<Integer> docList;  // declared here so this excerpt is self-contained
      int doc = -1;
      for (Map.Entry<Object, LinkedList<Integer>> entry : dataMap.entrySet())
      {
        list.add(list.format(entry.getKey()));
        docList = entry.getValue();
        freqs[i] = docList.size();
        minIDs[i] = docList.get(0);
        while((docId = docList.poll()) != null)
        {
          doc = docId;
          order.add(doc, i);
        }
        maxIDs[i] = doc;
        ++i;
      }
    }
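Here each document id is written into order with the ordinal i of its value, and list, freqs, minIDs and maxIDs are all indexed by that same ordinal (ordinal 0 is reserved for the null value added first). A minimal sketch of reading the mapping back, using an assumed document id and assuming TermValueList supports the usual List-style get:

    // Hypothetical read-back, not part of the original loader:
    int someDocId = 0;                        // assumed document id
    int ordinal = order.get(someDocId);       // 0 means the document had no value
    Object value = list.get(ordinal);         // formatted term stored for that ordinal (List-style get assumed)
    int freq = freqs[ordinal];                // number of documents sharing that value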

  @Override
  public FacetDataCache load(BoboIndexReader reader) throws IOException {
    int maxDoc = reader.maxDoc();

    BigIntArray order = new BigIntArray(maxDoc);

    TermValueList mterms = _termListFactory == null ? new TermStringList() : _termListFactory.createTermList();

    IntArrayList minIDList = new IntArrayList();
    IntArrayList maxIDList = new IntArrayList();
    IntArrayList freqList = new IntArrayList();

    TermDocs termDocs = null;
    TermEnum termEnum = null;
    int t = 0; // current term number
    mterms.add(null);
    minIDList.add(-1);
    maxIDList.add(-1);
    freqList.add(0);
    t++;
    try {
      termDocs = reader.termDocs();
      termEnum = reader.terms(new Term(_indexFieldName, ""));
      do {
        if (termEnum == null)
          break;
        Term term = termEnum.term();
        if (term == null || !_indexFieldName.equals(term.field()))
          break;

        // store term text
        // we expect that there is at most one term per document
        if (t > MAX_VAL_COUNT) {
          throw new IOException("maximum number of values cannot exceed: "
              + MAX_VAL_COUNT);
        }
        String val = term.text();
        mterms.add(val);
        int bit = (0x00000001 << (t-1));
        termDocs.seek(termEnum);
        //freqList.add(termEnum.docFreq());  // removed because the df doesn't take into account the num of deletedDocs
        int df = 0;
        int minID = -1;
        int maxID = -1;
        if (termDocs.next())
        {
          df++;
          int docid = termDocs.doc();
          order.add(docid, order.get(docid) | bit);
          minID = docid;
          while (termDocs.next())
          {
            df++;
            docid = termDocs.doc();
            order.add(docid, order.get(docid) | bit);
          }
          maxID = docid;
        }
        freqList.add(df);
        minIDList.add(minID);
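Unlike the ordinal-per-document caches above, this loader packs terms into bits: term number t is assigned the bit 1 << (t - 1), and each document's entry in order is the OR of the bits of all terms that match it, which is why MAX_VAL_COUNT caps the number of distinct terms (they must all fit into one int per document). A minimal sketch of testing membership afterwards, with assumed document and term numbers:

    // Hypothetical check, not part of the original loader: does this document carry term number someTerm?
    int someDoc = 123;                              // assumed document id
    int someTerm = 5;                               // assumed term number (1-based, as above)
    int termBit = 0x00000001 << (someTerm - 1);     // same bit layout as the loader
    boolean hasTerm = (order.get(someDoc) & termBit) != 0;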

