Package org.apache.drill.common.exceptions

Examples of org.apache.drill.common.exceptions.ExecutionSetupException
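Each example below follows the same pattern: a checked exception thrown while setting up a reader, writer, or scan is caught and rethrown as an ExecutionSetupException, using a message, the original cause, or both. A minimal sketch of that pattern, assuming a hypothetical ExampleReaderSetup class and openInput() helper (only the ExecutionSetupException constructors are taken from the examples below):

import java.io.IOException;

import org.apache.drill.common.exceptions.ExecutionSetupException;

public class ExampleReaderSetup {

  // Hypothetical setup method: wraps low-level failures in ExecutionSetupException
  // so callers see a single, setup-specific checked exception.
  public void setup() throws ExecutionSetupException {
    try {
      openInput();
    } catch (IOException e) {
      // message + cause, as in the writer-batch examples below
      throw new ExecutionSetupException("Failed to open input for ExampleReaderSetup. " + e.getMessage(), e);
    } catch (RuntimeException e) {
      // cause-only constructor, as in the HBase examples below
      throw new ExecutionSetupException(e);
    }
  }

  // Placeholder for the real setup work (opening files, creating scanners, etc.).
  private void openInput() throws IOException {
  }
}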


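Wrapping an IOException thrown while creating the record writer for a Parquet WriterRecordBatch: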
  public RecordBatch getWriterBatch(FragmentContext context, RecordBatch incoming, ParquetWriter writer)
          throws ExecutionSetupException {
    try {
      return new WriterRecordBatch(writer, incoming, context, getRecordWriter(context, writer));
    } catch (IOException e) {
      throw new ExecutionSetupException(String.format("Failed to create the WriterRecordBatch. %s", e.getMessage()), e);
    }
  }


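Choosing a PojoWriter for each field type while setting up a PojoRecordReader; an unsupported type or a SchemaChangeException becomes an ExecutionSetupException: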
        } else if (type == String.class) {
          w = new StringWriter(f, output.getManagedBuffer());
        } else if (type == Timestamp.class) {
          w = new NTimeStampWriter(f);
        } else {
          throw new ExecutionSetupException(String.format("PojoRecord reader doesn't yet support conversions from type [%s].", type));
        }
        writers.add(w);
        w.init(output);
      }

      this.writers = writers.toArray(new PojoWriter[writers.size()]);

    } catch (SchemaChangeException e) {
      throw new ExecutionSetupException("Failure while setting up schema for PojoRecordReader.", e);
    }
  }

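The same writer-batch pattern for the easy-format writer (EasyWriter):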
  public RecordBatch getWriterBatch(FragmentContext context, RecordBatch incoming, EasyWriter writer)
      throws ExecutionSetupException {
    try {
      return new WriterRecordBatch(writer, incoming, context, getRecordWriter(context, writer));
    } catch (IOException e) {
      throw new ExecutionSetupException(String.format("Failed to create the WriterRecordBatch. %s", e.getMessage()), e);
    }
  }

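Rethrowing setup failures with the cause-only constructor: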
          }
        }
      }
    } catch (SchemaChangeException e) {
      throw new ExecutionSetupException(e);
    } catch (Exception e) {
      throw new ExecutionSetupException(e);
    }
  }

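Wrapping an IOException raised while reading dictionary-page metadata from a Parquet file: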
            parquet.column.Encoding.valueOf(pageHeader.dictionary_page_header.encoding.name())
        );
        this.dictionary = page.getEncoding().initDictionary(parentStatus.columnDescriptor, page);
      }
    } catch (IOException e) {
      throw new ExecutionSetupException("Error opening or reading metadata for parquet file at location: "
        + path.getName(), e);
    }

  }

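Creating an HBaseRecordReader per scan spec and returning the readers in a ScanBatch; any failure during reader construction is wrapped: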
      try {
        readers.add(
            new HBaseRecordReader(subScan.getStorageConfig().getHBaseConf(), scanSpec, subScan.getColumns(), context)
        );
      } catch (Exception e1) {
        throw new ExecutionSetupException(e1);
      }
    }
    return new ScanBatch(subScan, context, readers.iterator());
  }

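Opening the HTable and ResultScanner during HBase reader setup; SchemaChangeException and IOException are wrapped together: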
          hbaseTable, hbaseConf.get(HConstants.ZOOKEEPER_QUORUM),
          hbaseConf.get(HBASE_ZOOKEEPER_PORT), hbaseConf.get(HConstants.ZOOKEEPER_ZNODE_PARENT));
      hTable = new HTable(hbaseConf, hbaseTable);
      resultScanner = hTable.getScanner(hbaseScan);
    } catch (SchemaChangeException | IOException e) {
      throw new ExecutionSetupException(e);
    }
  }
