Examples of VectorContainer
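The snippets below are excerpted from the Apache Drill code base and show typical ways a VectorContainer is created, populated with value vectors, transferred between batches, read back from a serialized stream, and released.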


Examples of org.apache.drill.exec.record.VectorContainer

  }

  private void purge() throws SchemaChangeException {
    // Compact the priority queue: copy the records referenced by the SV4 out of the current
    // hyper batch into a new container, then rebuild the queue from the copied batches.
    Stopwatch watch = new Stopwatch();
    watch.start();
    VectorContainer c = priorityQueue.getHyperBatch();
    VectorContainer newContainer = new VectorContainer();
    SelectionVector4 selectionVector4 = priorityQueue.getHeapSv4();
    SimpleRecordBatch batch = new SimpleRecordBatch(c, selectionVector4, context);
    SimpleRecordBatch newBatch = new SimpleRecordBatch(newContainer, null, context);
    if (copier == null) {
      // First purge: generate a copier for this schema; newContainer receives the matching output vectors.
      copier = RemovingRecordBatch.getGenerated4Copier(batch, context, oContext.getAllocator(), newContainer, newBatch);
    } else {
      // Subsequent purges: allocate fresh output vectors and re-point the existing copier at them.
      for (VectorWrapper<?> i : batch) {
        ValueVector v = TypeHelper.getNewVector(i.getField(), oContext.getAllocator());
        newContainer.add(v);
      }
      copier.setupRemover(context, batch, newBatch);
    }
    SortRecordBatchBuilder builder = new SortRecordBatchBuilder(oContext.getAllocator(), MAX_SORT_BYTES);
    // Copy each SV4 batch of records into newContainer and hand the result to the builder.
    do {
      int count = selectionVector4.getCount();
      int copiedRecords = copier.copyRecords(0, count);
      assert copiedRecords == count;
      for(VectorWrapper<?> v : newContainer){
        ValueVector.Mutator m = v.getValueVector().getMutator();
        m.setValueCount(count);
      }
      newContainer.buildSchema(BatchSchema.SelectionVectorMode.NONE);
      newContainer.setRecordCount(count);
      builder.add(newBatch);
    } while (selectionVector4.next());
    selectionVector4.clear();
    c.clear();
    VectorContainer newQueue = new VectorContainer();
    builder.canonicalize();
    builder.build(context, newQueue);
    // Re-seed the priority queue from the rebuilt hyper batch and a fresh SV4 wrapper.
    priorityQueue.resetQueue(newQueue, builder.getSv4().createNewWrapperCurrent());
    builder.getSv4().clear();
    selectionVector4.clear();
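The purge() excerpt combines two recurring VectorContainer idioms: cloning a batch's column layout into an empty container, and finalizing that container once records have been copied in. Below is a minimal sketch of both steps as hypothetical helpers; the method names and the Iterable parameter are illustrative rather than Drill API, and imports and the surrounding class are omitted. Every Drill call used appears in the excerpt above.

  // Hypothetical helpers condensing the purge() pattern; not part of Drill itself.
  static VectorContainer cloneLayout(Iterable<VectorWrapper<?>> source, BufferAllocator allocator) {
    VectorContainer target = new VectorContainer();
    for (VectorWrapper<?> w : source) {
      // same MaterializedField, freshly allocated buffers
      target.add(TypeHelper.getNewVector(w.getField(), allocator));
    }
    return target;
  }

  static void finalizeBatch(VectorContainer target, int count) {
    for (VectorWrapper<?> w : target) {
      w.getValueVector().getMutator().setValueCount(count);   // per-vector value count
    }
    target.buildSchema(BatchSchema.SelectionVectorMode.NONE); // plain batch, no selection vector
    target.setRecordCount(count);                             // container-level row count
  }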

Examples of org.apache.drill.exec.record.VectorContainer

      this.batchIndex = idx;
     
      if (idx == 0) {  // first batch holder can use the original htContainer
        htContainer = htContainerOrig;
      } else { // otherwise create a new one using the original's fields
        htContainer = new VectorContainer();
        for (VectorWrapper<?> w : htContainerOrig) {
          ValueVector vv = TypeHelper.getNewVector(w.getField(), allocator);
          vv.allocateNew();
          htContainer.add(vv);
        }
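This BatchHolder constructor applies the same clone-the-fields idiom sketched above: the first batch holder reuses the original hash-table container, while every later holder allocates fresh ValueVectors for the same MaterializedFields so that it owns its own buffers.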

Examples of org.apache.drill.exec.record.VectorContainer

    private int maxOccupiedIdx = -1;
    private int batchOutputCount = 0;

    private BatchHolder() {

      aggrValuesContainer = new VectorContainer();

      ValueVector vector;

      // build an output field (and its value vector) for each materialized value
      for (int i = 0; i < materializedValueFields.length; i++) {
        MaterializedField outputField = materializedValueFields[i];

Examples of org.apache.drill.exec.record.VectorContainer

    return newSchema ? IterOutcome.OK_NEW_SCHEMA : IterOutcome.OK;
  }

  private boolean load(RecordBatchData batch) {
    VectorContainer newContainer = batch.getContainer();
    if (schema != null && newContainer.getSchema().equals(schema)) {
      container.zeroVectors();
      BatchSchema schema = container.getSchema();
      // Schemas match: transfer each incoming column into the existing output vector.
      for (int i = 0; i < container.getNumberOfColumns(); i++) {
        MaterializedField field = schema.getColumn(i);
        MajorType type = field.getType();
        ValueVector vOut = container.getValueAccessorById(TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()),
                container.getValueVectorId(field.getPath()).getFieldIds()).getValueVector();
        ValueVector vIn = newContainer.getValueAccessorById(TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()),
                newContainer.getValueVectorId(field.getPath()).getFieldIds()).getValueVector();
        TransferPair tp = vIn.makeTransferPair(vOut);
        tp.transfer();
      }
      return false;
    } else {
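When the incoming schema matches the one already loaded, load() moves the data across with a TransferPair rather than copying it. A minimal sketch of that column-by-column transfer, assuming two containers with identical schemas (the helper name is illustrative; imports and exception handling are omitted):

  // Hypothetical helper mirroring load() above: hand each column's buffers from one
  // container to another when both share the same schema. TransferPair moves buffer
  // ownership instead of copying the data.
  static void transferColumns(VectorContainer from, VectorContainer to) {
    BatchSchema schema = from.getSchema();
    for (int i = 0; i < from.getNumberOfColumns(); i++) {
      MaterializedField field = schema.getColumn(i);
      MajorType type = field.getType();
      ValueVector vIn = from.getValueAccessorById(TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()),
          from.getValueVectorId(field.getPath()).getFieldIds()).getValueVector();
      ValueVector vOut = to.getValueAccessorById(TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()),
          to.getValueVectorId(field.getPath()).getFieldIds()).getValueVector();
      vIn.makeTransferPair(vOut).transfer(); // vOut now owns the buffers
    }
  }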

Examples of org.apache.drill.exec.record.VectorContainer

    if (isProbe) {
      keyExprsProbe = new LogicalExpression[htConfig.getKeyExprsProbe().length];
    }
   
    ErrorCollector collector = new ErrorCollectorImpl();
    VectorContainer htContainerOrig = new VectorContainer(); // original ht container from which others may be cloned
    LogicalExpression[] htKeyExprs = new LogicalExpression[htConfig.getKeyExprsBuild().length];
    TypedFieldId[] htKeyFieldIds = new TypedFieldId[htConfig.getKeyExprsBuild().length];

    int i = 0;
    for (NamedExpression ne : htConfig.getKeyExprsBuild()) {
      final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incomingBuild, collector, context.getFunctionRegistry());
      if(collector.hasErrors()) throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
      if (expr == null) continue;
      keyExprsBuild[i] = expr;
     
      final MaterializedField outputField = MaterializedField.create(ne.getRef(), expr.getMajorType());
      // create a type-specific ValueVector for this key
      ValueVector vv = TypeHelper.getNewVector(outputField, allocator);
      vv.allocateNew();
      htKeyFieldIds[i] = htContainerOrig.add(vv);
     
      i++;
    }

    if (isProbe) {
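The return value of VectorContainer.add(vv) is a TypedFieldId, which is how the hash table later addresses each key column. A minimal sketch of that registration step, assuming the key fields and an allocator are already in hand (variable names are illustrative; imports omitted):

  // Assumed inputs: MaterializedField[] keyFields and a BufferAllocator named allocator.
  VectorContainer keyContainer = new VectorContainer();
  TypedFieldId[] keyIds = new TypedFieldId[keyFields.length];
  for (int i = 0; i < keyFields.length; i++) {
    ValueVector vv = TypeHelper.getNewVector(keyFields[i], allocator); // type-specific vector
    vv.allocateNew();                 // allocate initial buffers
    keyIds[i] = keyContainer.add(vv); // remember the id for later lookups
  }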

Examples of org.apache.drill.exec.record.VectorContainer

                            RawFragmentBatchProvider[] fragProviders) throws OutOfMemoryException {

    super(config, context);
    this.fragProviders = fragProviders;
    this.context = context;
    this.outgoingContainer = new VectorContainer();
    this.stats.setLongStat(Metric.NUM_SENDERS, config.getNumSenders());
    this.config = config;
  }

Examples of org.apache.drill.exec.record.VectorContainer

    BatchMetaInfo aggBatchMetaInfo = new BatchMetaInfo();

    while (input.available() > 0) {
      VectorAccessibleSerializable vcSerializable = new VectorAccessibleSerializable(DumpCat.allocator);
      vcSerializable.readFromStream(input);
      VectorContainer vectorContainer = (VectorContainer) vcSerializable.get();

      aggBatchMetaInfo.add(getBatchMetaInfo(vcSerializable));

      if (vectorContainer.getRecordCount() == 0) {
        emptyBatchNum ++;
      }

      if (prevSchema != null && !vectorContainer.getSchema().equals(prevSchema)) {
        schemaChangeIdx.add(batchNum);
      }

      prevSchema = vectorContainer.getSchema();
      batchNum ++;

      vectorContainer.zeroVectors();
    }

    /* output the summary stat */
    System.out.println(String.format("Total # of batches: %d", batchNum));
    //output: rows, selectedRows, avg rec size, total data size.
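DumpCat reads batches back with VectorAccessibleSerializable: each readFromStream() call materializes one batch, get() exposes it as a VectorContainer, and zeroVectors() releases its buffers. A minimal sketch of the same read loop, assuming an open InputStream named input and a BufferAllocator named allocator (exception handling omitted):

  while (input.available() > 0) {
    VectorAccessibleSerializable batch = new VectorAccessibleSerializable(allocator);
    batch.readFromStream(input);                               // read one serialized batch
    VectorContainer container = (VectorContainer) batch.get(); // view it as a container
    System.out.println(container.getSchema() + " rows: " + container.getRecordCount());
    container.zeroVectors();                                   // release this batch's buffers
  }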

Examples of org.apache.drill.exec.record.VectorContainer

    while (input.available() > 0 && batchNum ++ < targetBatchNum) {
      vcSerializable = new VectorAccessibleSerializable(DumpCat.allocator);
      vcSerializable.readFromStream(input);

      if (batchNum != targetBatchNum) {
        VectorContainer vectorContainer = (VectorContainer) vcSerializable.get();
        vectorContainer.zeroVectors();
      }
    }

    if (batchNum < targetBatchNum) {
      System.out.println(String.format("Invalid batch number! The file contains %d batches; please specify a batch number in the range 0..%d.", batchNum + 1, batchNum));
      input.close();
      System.exit(-1);
    }

    if (vcSerializable != null) {
      showSingleBatch(vcSerializable, showHeader);
      VectorContainer vectorContainer = (VectorContainer) vcSerializable.get();
      vectorContainer.zeroVectors();
    }
  }

Examples of org.apache.drill.exec.record.VectorContainer

    }
  }


  private void showSingleBatch (VectorAccessibleSerializable vcSerializable, boolean showHeader) {
    VectorContainer vectorContainer = (VectorContainer)vcSerializable.get();

    /* show the header of the batch */
    if (showHeader) {
      System.out.println(getBatchMetaInfo(vcSerializable).toString());


Examples of org.apache.drill.exec.record.VectorContainer

    incoming.cleanup();
  }

  @Override
  public IterOutcome buildSchema() throws SchemaChangeException {
    VectorContainer c = new VectorContainer(oContext);
    stats.startProcessing();
    try {
      // Pause this operator's processing timer while the upstream operator builds its schema.
      stats.stopProcessing();
      try {
        incoming.buildSchema();
      } finally {
        stats.startProcessing();
      }
      for (VectorWrapper w : incoming) {
        c.addOrGet(w.getField());
      }
      c = VectorContainer.canonicalize(c);
      for (VectorWrapper w : c) {
        container.add(w.getValueVector());
      }
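The buildSchema() excerpt mirrors an incoming batch's fields into a new container with addOrGet() and then canonicalizes the result so the column order is deterministic. A minimal sketch of just those two steps, assuming incoming and oContext are in scope as in the excerpt:

  VectorContainer out = new VectorContainer(oContext);
  for (VectorWrapper<?> w : incoming) {
    out.addOrGet(w.getField());            // create (or reuse) a vector for each incoming field
  }
  out = VectorContainer.canonicalize(out); // put the columns into a canonical order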