Package org.apache.drill.exec.vector.complex.fn

Examples of org.apache.drill.exec.vector.complex.fn.JsonReaderWithState


    try {
      // Open the JSON file on the distributed file system and wrap the
      // stream in a splitter that hands the reader one record at a time.
      stream = fileSystem.open(hadoopPath);
      JsonRecordSplitter splitter = new UTF8JsonRecordSplitter(stream);
      this.writer = new VectorContainerWriter(output);
      this.mutator = output;
      jsonReader = new JsonReaderWithState(splitter);
    } catch (IOException e) {
      throw new ExecutionSetupException("Failure reading JSON file.", e);
    }
  }
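Once constructed, the reader is typically driven batch by batch from the record reader's next() method. Below is a minimal sketch of that loop; the next() shape and the setValueCount call are assumptions inferred from the loop pattern in the later examples on this page, not part of this snippet:

  public int next() {
    // Start a fresh batch: allocate vector memory and rewind the writer.
    writer.allocate();
    writer.reset();
    int recordCount = 0;

    outside: while (true) {
      writer.setPosition(recordCount);
      switch (jsonReader.write(writer)) {
      case WRITE_SUCCEED:
        recordCount++;      // record landed; advance to the next slot
        break;
      case NO_MORE:         // input exhausted
      case WRITE_FAILED:    // batch full; the record is retried on the next call
        break outside;
      }
    }

    writer.setValueCount(recordCount);   // assumed API; not shown on this page
    return recordCount;
  }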


    MapVector v = new MapVector("", allocator);
    ComplexWriterImpl writer = new ComplexWriterImpl("col", v);
    writer.allocate();

    JsonReaderWithState jsonReader = new JsonReaderWithState(new ReaderJSONRecordSplitter(compound));
    int i = 0;
    List<Integer> batchSizes = Lists.newArrayList();

    outside: while (true) {
      writer.setPosition(i);
      switch (jsonReader.write(writer)) {
      case WRITE_SUCCEED:
        i++;
        break;
      case NO_MORE:
        batchSizes.add(i);
        System.out.println("no more records - main loop");
        break outside;

      case WRITE_FAILED:
        // The record did not fit in the current batch: record the batch
        // size, re-allocate the vectors, and retry the same record.
        System.out.println("==== hit bounds at " + i);
        //writer.setValueCounts(i - 1);
        batchSizes.add(i);
        i = 0;
        writer.allocate();
        writer.reset();

        switch (jsonReader.write(writer)) {
        case NO_MORE:
          System.out.println("no more records - new alloc loop.");
          break outside;
        case WRITE_FAILED:
          throw new RuntimeException("Failure while trying to write.");
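The snippet above is cut off inside the retry switch. One plausible way it could close out, shown purely for orientation; the WRITE_SUCCEED arm and the final print are assumptions, not recovered source:

        case WRITE_SUCCEED:
          // The failed record fit into the freshly re-allocated batch;
          // count it and fall back to the main loop.
          i++;
          break;
        }
        break;
      }
    }

    // Each entry in batchSizes is the number of records that fit in one batch.
    System.out.println("batch sizes: " + batchSizes);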

    MapVector v = new MapVector("", allocator);
    ComplexWriterImpl writer = new ComplexWriterImpl("col", v);
    writer.allocate();

    // Same loop as above, but using the newer constructor that takes an
    // explicit working buffer, a column projection, and the all-text-mode flag.
    DrillBuf buffer = allocator.buffer(255);
    JsonReaderWithState jsonReader = new JsonReaderWithState(new ReaderJSONRecordSplitter(compound), buffer,
        GroupScan.ALL_COLUMNS, false);
    int i = 0;
    List<Integer> batchSizes = Lists.newArrayList();

    outside: while (true) {
      writer.setPosition(i);
      switch (jsonReader.write(writer)) {
      case WRITE_SUCCEED:
        i++;
        break;
      case NO_MORE:
        batchSizes.add(i);
        System.out.println("no more records - main loop");
        break outside;

      case WRITE_FAILED:
        System.out.println("==== hit bounds at " + i);
        //writer.setValueCounts(i - 1);
        batchSizes.add(i);
        i = 0;
        writer.allocate();
        writer.reset();

        switch (jsonReader.write(writer)) {
        case NO_MORE:
          System.out.println("no more records - new alloc loop.");
          break outside;
        case WRITE_FAILED:
          throw new RuntimeException("Failure while trying to write.");
          // (snippet truncated here, as in the previous example)
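Unlike the first test, this variant allocates its own scratch buffer with allocator.buffer(255), whereas the readers further down take fragmentContext.getManagedBuffer(). A sketch of the ownership difference; the try/finally is a suggested pattern, not from the original code:

    // Direct allocation: the caller owns the DrillBuf and must release it,
    // or the allocator will report the memory as leaked.
    DrillBuf scratch = allocator.buffer(255);
    try {
      // ... use scratch as the reader's working space ...
    } finally {
      scratch.release();
    }

    // Managed allocation: the fragment context tracks this buffer and frees
    // it when the fragment closes, so no explicit release is required.
    DrillBuf managed = fragmentContext.getManagedBuffer();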

        throw new ExecutionSetupException(e);
      }
    } else {
      try {
        // JSON documents: write through a container writer backed by a
        // buffer whose lifetime is managed by the fragment context.
        this.writer = new VectorContainerWriter(output);
        this.jsonReaderWithState = new JsonReaderWithState(
            fragmentContext.getManagedBuffer(), columns, enableAllTextMode);
      } catch (IOException e) {
        throw new ExecutionSetupException(
            "Failure in Mongo JsonReader initialization.", e);
      }
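Taken together, the examples above and below use three constructor shapes. Collected side by side for comparison; identifiers are as in the snippets, and the comments are interpretation rather than documentation:

    // File reader, older shape: a record splitter only.
    jsonReader = new JsonReaderWithState(splitter);

    // File reader, newer shape: splitter, explicit working DrillBuf,
    // projected columns, and the all-text-mode flag (when true, every
    // scalar value is read as VarChar text).
    jsonReader = new JsonReaderWithState(splitter, buffer, columns, enableAllTextMode);

    // Mongo reader: no splitter; only the working buffer, the projected
    // columns, and the all-text-mode flag are supplied.
    jsonReaderWithState = new JsonReaderWithState(buffer, columns, enableAllTextMode);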

    try {
      stream = fileSystem.open(hadoopPath);
      JsonRecordSplitter splitter = new UTF8JsonRecordSplitter(stream);
      this.writer = new VectorContainerWriter(output);
      this.mutator = output;
      // Newer constructor: fragment-managed buffer, projected columns,
      // and the all-text-mode flag.
      jsonReader = new JsonReaderWithState(splitter, fragmentContext.getManagedBuffer(), columns, enableAllTextMode);
    } catch (Exception e) {
      handleAndRaise("Failure reading JSON file.", e);
    }
  }
