Package org.apache.drill.exec.exception

Examples of org.apache.drill.exec.exception.SchemaChangeException
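SchemaChangeException is a checked exception that Drill's execution operators throw when an incoming record batch carries a schema they cannot handle: an unexpected mid-query schema change, an expression that fails to materialize against the incoming schema, or a failure while loading run-time generated code. The snippets below are excerpts from the Drill execution engine and show the recurring patterns.

Two constructor overloads cover most uses: a plain message when the operator itself detects an unsupported schema, and a message plus cause when a lower-level failure is wrapped. A minimal, self-contained sketch of both patterns follows; the class and method names are hypothetical, and only the overloads visible in the snippets below are used.

import java.io.IOException;

import org.apache.drill.exec.exception.SchemaChangeException;

// Hypothetical illustration; not part of the Drill code base.
public class SchemaChangeExceptionSketch {

  // Pattern 1: the operator detects a schema it cannot handle.
  static void rejectSchemaChange(boolean schemaChanged) throws SchemaChangeException {
    if (schemaChanged) {
      throw new SchemaChangeException("Hash join does not support schema changes");
    }
  }

  // Pattern 2: wrap a lower-level failure so the cause is preserved while
  // callers only need to handle SchemaChangeException.
  static void wrapLowerLevelFailure() throws SchemaChangeException {
    try {
      throw new IOException("simulated failure while loading a generated class");
    } catch (IOException e) {
      throw new SchemaChangeException("Failure while attempting to load generated class", e);
    }
  }
}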


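From the hash join's handling of the build side: a build batch that arrives with a selection vector, or a second OK_NEW_SCHEMA once the right-hand schema is already set, is rejected with a SchemaChangeException.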
        case OK_NEW_SCHEMA:
          if (rightSchema == null) {
            rightSchema = right.getSchema();

            if (rightSchema.getSelectionVectorMode() != BatchSchema.SelectionVectorMode.NONE) {
              throw new SchemaChangeException("Hash join does not support build batch with selection vectors");
            }
            setupHashTable();
          } else {
            throw new SchemaChangeException("Hash join does not support schema changes");
          }
          // Fall through
        case OK:
          int currentRecordCount = right.getRecordCount();


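An operator setup path that generates a new worker: code-generation, IO, and schema errors are wrapped in a SchemaChangeException, reported via context.fail(), and the operator stops by returning IterOutcome.STOP.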
          logger.debug("Creating New Worker");
          stats.startSetup();
          this.worker = generateNewWorker();
          first = true;
        } catch (ClassTransformationException | IOException | SchemaChangeException e) {
          context.fail(new SchemaChangeException(e));
          kill(false);
          return IterOutcome.STOP;
        } finally {
          stats.stopSetup();
        }

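Setting up a generated Copier for removing selection vectors: a failure while loading the generated class is rethrown as a SchemaChangeException with the original cause attached.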
      Copier copier = context.getImplementationClass(cg);
      copier.setupRemover(context, incoming, this);

      return copier;
    } catch (ClassTransformationException | IOException e) {
      throw new SchemaChangeException("Failure while attempting to load generated class", e);
    }
  }

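The same copier setup against a different incoming batch and outgoing container, using the identical wrap-and-rethrow pattern.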
      Copier copier = context.getImplementationClass(cg);
      copier.setupRemover(context, batch, outgoing);

      return copier;
    } catch (ClassTransformationException | IOException e) {
      throw new SchemaChangeException("Failure while attempting to load generated class", e);
    }
  }

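Filter setup for four-byte selection vectors: the filter expression is materialized against the incoming schema, any errors collected during materialization become a SchemaChangeException, and a failure to load the generated Filterer is wrapped the same way.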
    final List<TransferPair> transfers = Lists.newArrayList();
    final ClassGenerator<Filterer> cg = CodeGenerator.getRoot(Filterer.TEMPLATE_DEFINITION4, context.getFunctionRegistry());

    final LogicalExpression expr = ExpressionTreeMaterializer.materialize(popConfig.getExpr(), incoming, collector, context.getFunctionRegistry());
    if(collector.hasErrors()){
      throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema.  Errors:\n %s.", collector.toErrorString()));
    }

    cg.addExpr(new ReturnValueExpression(expr));

//    for(VectorWrapper<?> i : incoming){
//      ValueVector v = TypeHelper.getNewVector(i.getField(), context.getAllocator());
//      container.add(v);
//      allocators.add(getAllocator4(v));
//    }

    for (VectorWrapper<?> vw : incoming) {
      for (ValueVector vv : vw.getValueVectors()) {
        TransferPair pair = vv.getTransferPair();
        container.add(pair.getTo());
        transfers.add(pair);
      }
    }

    // allocate outgoing sv4
    container.buildSchema(SelectionVectorMode.FOUR_BYTE);

    try {
      TransferPair[] tx = transfers.toArray(new TransferPair[transfers.size()]);
      Filterer filter = context.getImplementationClass(cg);
      filter.setup(context, incoming, this, tx);
      return filter;
    } catch (ClassTransformationException | IOException e) {
      throw new SchemaChangeException("Failure while attempting to load generated class", e);
    }

  }

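The two-byte selection-vector variant of the same filter setup.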
    final List<TransferPair> transfers = Lists.newArrayList();
    final ClassGenerator<Filterer> cg = CodeGenerator.getRoot(Filterer.TEMPLATE_DEFINITION2, context.getFunctionRegistry());

    final LogicalExpression expr = ExpressionTreeMaterializer.materialize(popConfig.getExpr(), incoming, collector, context.getFunctionRegistry());
    if(collector.hasErrors()){
      throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema.  Errors:\n %s.", collector.toErrorString()));
    }

    cg.addExpr(new ReturnValueExpression(expr));

    for(VectorWrapper<?> v : incoming){
      TransferPair pair = v.getValueVector().getTransferPair();
      container.add(pair.getTo());
      transfers.add(pair);
    }

    container.buildSchema(SelectionVectorMode.TWO_BYTE);

    try {
      TransferPair[] tx = transfers.toArray(new TransferPair[transfers.size()]);
      Filterer filter = context.getImplementationClass(cg);
      filter.setup(context, incoming, this, tx);
      return filter;
    } catch (ClassTransformationException | IOException e) {
      throw new SchemaChangeException("Failure while attempting to load generated class", e);
    }

  }
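The two filter snippets above, and the comparator snippets further down, repeat the same check after materializing an expression. A hypothetical helper could factor it out; this is a sketch, not part of the Drill API, and it assumes ErrorCollector resolves to org.apache.drill.common.expression.ErrorCollector:

import org.apache.drill.common.expression.ErrorCollector;
import org.apache.drill.exec.exception.SchemaChangeException;

// Hypothetical helper; not part of the Drill code base.
public class MaterializationCheck {
  // Throw a SchemaChangeException carrying the collected errors, mirroring the
  // inline checks in the filter and comparator snippets.
  static void checkForErrors(ErrorCollector collector) throws SchemaChangeException {
    if (collector.hasErrors()) {
      throw new SchemaChangeException(String.format(
          "Failure while trying to materialize incoming schema.  Errors:\n %s.",
          collector.toErrorString()));
    }
  }
}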

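Adding a ValueVector for a field to an output container: if an existing vector's class does not match the requested type, a SchemaChangeException reports the mismatch; otherwise the vector is added and the schema is flagged as changed.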
      ValueVector v = fieldVectorMap.get(field.key());

      if (v == null || v.getClass() != clazz) {
        // Field does not exist; add it to the map and the output container
        v = TypeHelper.getNewVector(field, oContext.getAllocator());
        if (!clazz.isAssignableFrom(v.getClass())) {
          throw new SchemaChangeException(String.format(
              "The class that was provided %s does not correspond to the expected vector type of %s.",
              clazz.getSimpleName(), v.getClass().getSimpleName()));
        }
        container.add(v);
        fieldVectorMap.put(field.key(), v);

        // Adding new vectors to the container marks that the schema has changed
        schemaChange = true;

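Building a four-byte selection vector over the accumulated batches of a sort: more than one schema among the batches, or more batches than an SV4 can address, is rejected with a SchemaChangeException.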
    return batches.isEmpty();
  }

  public void build(FragmentContext context, VectorContainer outputContainer) throws SchemaChangeException{
    outputContainer.clear();
    if(batches.keySet().size() > 1) throw new SchemaChangeException("Sort currently only supports a single schema.");
    if(batches.size() > Character.MAX_VALUE) throw new SchemaChangeException("Sort cannot work on more than %d batches at a time.", (int) Character.MAX_VALUE);
    if(batches.keys().size() < 1){
      assert false : "Invalid to have an empty set of batches with no schemas.";
    }
    sv4 = new SelectionVector4(svAllocator.getAllocation(), recordCount, Character.MAX_VALUE);
    BatchSchema schema = batches.keySet().iterator().next();

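Generating comparison code for each ordering expression: every ordering is materialized against the batch schema, and materialization errors surface as a SchemaChangeException before the left, right, and main mappings are emitted.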
    for(Ordering od : orderings){
      // first, we rewrite the evaluation stack for each side of the comparison.
      ErrorCollector collector = new ErrorCollectorImpl();
      final LogicalExpression expr = ExpressionTreeMaterializer.materialize(od.getExpr(), batch, collector, context.getFunctionRegistry());
      if(collector.hasErrors()) throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
      g.setMappingSet(leftMapping);
      HoldingContainer left = g.addExpr(expr, false);
      g.setMappingSet(rightMapping);
      HoldingContainer right = g.addExpr(expr, false);
      g.setMappingSet(mainMapping);

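The same comparator generation driven by the orderings taken from the physical operator configuration (popConfig).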
    for(Ordering od : popConfig.getOrderings()){
      // first, we rewrite the evaluation stack for each side of the comparison.
      ErrorCollector collector = new ErrorCollectorImpl();
      final LogicalExpression expr = ExpressionTreeMaterializer.materialize(od.getExpr(), batch, collector,context.getFunctionRegistry());
      if(collector.hasErrors()) throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
      g.setMappingSet(LEFT_MAPPING);
      HoldingContainer left = g.addExpr(expr, false);
      g.setMappingSet(RIGHT_MAPPING);
      HoldingContainer right = g.addExpr(expr, false);
      g.setMappingSet(MAIN_MAPPING);