Package org.apache.drill.exec.rpc.user

Examples of org.apache.drill.exec.rpc.user.QueryResultBatch
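
The examples below share one life cycle: a query returns a List<QueryResultBatch>, each batch is loaded into a RecordBatchLoader, values are read through the value-vector accessors, and finally both the loader and the batch are released. As a rough orientation, here is a minimal sketch of that cycle; the connected DrillClient, the BufferAllocator, and the query text are assumptions for illustration, not part of the examples that follow.

  // Minimal sketch of the common QueryResultBatch consumption cycle.
  // Assumptions: a connected DrillClient `client`, a BufferAllocator `allocator`,
  // and an illustrative query string.
  void printAllResults(DrillClient client, BufferAllocator allocator) throws Exception {
    List<QueryResultBatch> results = client.runQuery(
        org.apache.drill.exec.proto.UserBitShared.QueryType.SQL,
        "SELECT * FROM cp.`employee.json`");

    RecordBatchLoader loader = new RecordBatchLoader(allocator);
    for (QueryResultBatch batch : results) {
      if (batch.hasData()) {                         // header-only batches carry no rows
        loader.load(batch.getHeader().getDef(), batch.getData());
        for (int r = 0; r < loader.getRecordCount(); r++) {
          for (VectorWrapper<?> w : loader) {        // one wrapper per column
            System.out.print(w.getField().toExpr() + "="
                + w.getValueVector().getAccessor().getObject(r) + "\t");
          }
          System.out.println();
        }
        loader.clear();                              // release the vectors held by the loader
      }
      batch.release();                               // release the batch's underlying buffer
    }
  }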


    List<QueryResultBatch> results = testPhysicalWithResults(queries[0]);
    assertEquals(2, results.size());
    // "`field_1`", "`field_3`.`inner_1`", "`field_3`.`inner_2`", "`field_4`.`inner_1`"

    RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
    QueryResultBatch batch = results.get(0);
    assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
    assertEquals(5, batchLoader.getSchema().getFieldCount());
    testExistentColumns(batchLoader, batch);

    // Columns that do not exist in the data are materialized as NullableIntVectors full of nulls.
    VectorWrapper<?> vw = batchLoader.getValueAccessorById(
        NullableIntVector.class, //
        batchLoader.getValueVectorId(SchemaPath.getCompoundPath("non_existent_at_root")).getFieldIds() //
    );
    assertNull(vw.getValueVector().getAccessor().getObject(0));
    assertNull(vw.getValueVector().getAccessor().getObject(1));
    assertNull(vw.getValueVector().getAccessor().getObject(2));

    vw = batchLoader.getValueAccessorById(
        NullableIntVector.class, //
        batchLoader.getValueVectorId(SchemaPath.getCompoundPath("non_existent", "nested","field")).getFieldIds() //
    );
    assertNull(vw.getValueVector().getAccessor().getObject(0));
    assertNull(vw.getValueVector().getAccessor().getObject(1));
    assertNull(vw.getValueVector().getAccessor().getObject(2));

    vw.getValueVector().clear();
    batch.release();
    batchLoader.clear();
  }


        if (ex != null)
          throw ex;
        if (completed && queue.isEmpty()) {
          return null;
        } else {
          QueryResultBatch q = queue.poll(50, TimeUnit.MILLISECONDS);
          if (q != null) {
            if (!autoread && queue.size() < MAX / 2) {
              // The buffered queue has drained below half capacity: resume automatic reads.
              autoread = true;
              throttle.setAutoRead(true);
              throttle = null;

    }

    void close() {
      closed = true;
      // Drain the queue and release any buffered batch data so it is not leaked.
      while (!queue.isEmpty()) {
        QueryResultBatch qrb = queue.poll();
        if (qrb != null && qrb.getData() != null) {
          qrb.getData().release();
        }
      }
      completed = true;
    }
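
A caller of a buffering listener like the one above typically polls it until null signals that the query has completed and the queue has drained, releasing every batch it receives. In the sketch below, listener.getNext() is a hypothetical stand-in for the truncated polling method shown above.

    // Hypothetical caller loop; `listener` and `getNext()` are illustrative names,
    // not APIs confirmed by the fragments above.
    QueryResultBatch batch;
    while ((batch = listener.getNext()) != null) {
      try {
        // ... inspect batch.getHeader() and batch.getData() here ...
      } finally {
        batch.release();   // always return the buffer, even if inspection fails
      }
    }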

      //int expectedBatchCount = 2;

      //assertEquals(expectedBatchCount, results.size());

      for (int i = 0; i < results.size(); ++i) {
        QueryResultBatch batch = results.get(i);
        if (i == 1) {
          assertTrue(batch.hasData());
        } else {
          assertFalse(batch.hasData());
          batch.release();
          continue;
        }

        assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
        boolean firstColumn = true;

        // print headers.
        System.out.println("\n\n========NEW SCHEMA=========\n\n");
        for (VectorWrapper<?> v : batchLoader) {

          if (firstColumn) {
            firstColumn = false;
          } else {
            System.out.print("\t");
          }
          System.out.print(v.getField().toExpr());
          System.out.print("[");
          System.out.print(v.getField().getType().getMinorType());
          System.out.print("]");
        }

        System.out.println();


        for (int r = 0; r < batchLoader.getRecordCount(); r++) {
          boolean first = true;
          recordCount++;
          for (VectorWrapper<?> v : batchLoader) {
            if (first) {
              first = false;
            } else {
              System.out.print("\t");
            }

            ValueVector.Accessor accessor = v.getValueVector().getAccessor();
            System.out.print(accessor.getObject(r));
          }
          if (!first) {
            System.out.println();
          }
        }
        batchLoader.clear();
        batch.release();
      }

      assertEquals(2, recordCount);
    }
  }

      List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
          Files.toString(FileUtils.getResourceAsFile("/functions/cast/two_way_implicit_cast.json"), Charsets.UTF_8));

      RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());

      QueryResultBatch batch = results.get(0);
      assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));

      Iterator<VectorWrapper<?>> itr = batchLoader.iterator();

      ValueVector.Accessor intAccessor1 = itr.next().getValueVector().getAccessor();
      ValueVector.Accessor varcharAccessor1 = itr.next().getValueVector().getAccessor();

                                                      BatchSchema schema) throws SchemaChangeException, UnsupportedEncodingException {
    // TODO - this does not handle schema changes
    Map<String, HyperVectorValueIterator> combinedVectors = new HashMap<>();

    long totalRecords = 0;
    QueryResultBatch batch;
    int size = records.size();
    for (int i = 0; i < size; i++) {
      batch = records.get(i);
      loader = new RecordBatchLoader(getAllocator());
      loader.load(batch.getHeader().getDef(), batch.getData());
      logger.debug("reading batch with " + loader.getRecordCount() + " rows, total read so far " + totalRecords);
      totalRecords += loader.getRecordCount();
      for (VectorWrapper w : loader) {
        String field = w.getField().toExpr();
        if ( ! combinedVectors.containsKey(field)) {

                                       BatchSchema schema) throws SchemaChangeException, UnsupportedEncodingException {
    // TODO - this does not handle schema changes
    Map<String, List> combinedVectors = new HashMap<>();

    long totalRecords = 0;
    QueryResultBatch batch;
    int size = records.size();
    for (int i = 0; i < size; i++) {
      batch = records.get(0);
      loader.load(batch.getHeader().getDef(), batch.getData());
      if (schema == null) {
        schema = loader.getSchema();
        for (MaterializedField mf : schema) {
          combinedVectors.put(mf.getPath().toExpr(), new ArrayList<>());
        }
      }
      logger.debug("reading batch with " + loader.getRecordCount() + " rows, total read so far " + totalRecords);
      totalRecords += loader.getRecordCount();
      for (VectorWrapper w : loader) {
        String field = w.getField().toExpr();
        for (int j = 0; j < loader.getRecordCount(); j++) {
          // Cap the number of materialized values per column at 5,000,000 to bound memory use.
          if (totalRecords - loader.getRecordCount() + j > 5000000) {
            continue;
          }
          Object obj = w.getValueVector().getAccessor().getObject(j);
          if (obj != null) {
            if (obj instanceof Text) {
              obj = obj.toString();
              if (obj.equals("")) {
                System.out.println(w.getField());
              }
            }
            else if (obj instanceof byte[]) {
              obj = new String((byte[]) obj, "UTF-8");
            }
          }
          combinedVectors.get(field).add(obj);
        }
      }
      records.remove(0);
      batch.release();
      loader.clear();
    }
    return combinedVectors;
  }

  }

  public void addToMaterializedResults(List<Map> materializedRecords,  List<QueryResultBatch> records, RecordBatchLoader loader,
                                       BatchSchema schema) throws SchemaChangeException, UnsupportedEncodingException {
    long totalRecords = 0;
    QueryResultBatch batch;
    int size = records.size();
    for (int i = 0; i < size; i++) {
      batch = records.get(0);
      loader.load(batch.getHeader().getDef(), batch.getData());
      if (schema == null) {
        schema = loader.getSchema();
      }
      logger.debug("reading batch with " + loader.getRecordCount() + " rows, total read so far " + totalRecords);
      totalRecords += loader.getRecordCount();
      for (int j = 0; j < loader.getRecordCount(); j++) {
        HashMap<String, Object> record = new HashMap<>();
        for (VectorWrapper w : loader) {
          Object obj = w.getValueVector().getAccessor().getObject(j);
          if (obj != null) {
            if (obj instanceof Text) {
              obj = obj.toString();
              if (obj.equals("")) {
                System.out.println(w.getField());
              }
            }
            else if (obj instanceof byte[]) {
              obj = new String((byte[]) obj, "UTF-8");
            }
          }
          record.put(w.getField().toExpr(), obj);
        }
        materializedRecords.add(record);
      }
      records.remove(0);
      batch.release();
      loader.clear();
    }
  }
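
A caller of addToMaterializedResults might look like the sketch below; the allocator and the results list are assumed to come from the surrounding test, and passing null for the schema lets the helper infer it from the first loaded batch, as the code above shows.

    // Hypothetical caller (variable names are illustrative, not from the source above).
    List<Map> materialized = new ArrayList<>();
    RecordBatchLoader loader = new RecordBatchLoader(allocator);    // `allocator` assumed available
    addToMaterializedResults(materialized, results, loader, null);  // null schema: inferred from the first batch
    // materialized now holds one column-expression -> value map per row; the helper
    // has released every batch and emptied the results list.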
