Class org.apache.hadoop.hive.ql.exec.FileSinkOperator

Examples of org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter
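
FileSinkOperator.RecordWriter is the narrow interface Hive's file sink writes rows through: write(Writable) emits one record and close(boolean abort) finishes the output, with the abort flag signalling that the result should be discarded rather than committed. The snippets below show how different output formats and tests construct it. As a minimal sketch of the contract (the class name SimpleTextWriter and the plain OutputStream are illustrative assumptions, not code from the examples):

import java.io.IOException;
import java.io.OutputStream;

import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

// Illustrative only: writes each Text record as one line to an OutputStream.
public class SimpleTextWriter implements RecordWriter {
  private final OutputStream out;

  public SimpleTextWriter(OutputStream out) {
    this.out = out;
  }

  @Override
  public void write(Writable r) throws IOException {
    Text t = (Text) r;                        // assumes rows arrive as Text
    out.write(t.getBytes(), 0, t.getLength());
    out.write('\n');
  }

  @Override
  public void close(boolean abort) throws IOException {
    out.close();                              // this sketch ignores the abort flag
  }
}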


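This first fragment, apparently from code that enforces bucketing, creates a zero-length file for every expected bucket path: it obtains a writer from HiveFileFormatUtils.getRecordWriter for the table's output format and closes it immediately, leaving an empty bucket file behind.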
      throw new HiveException(e);
    }

    for (String p : paths) {
      Path path = new Path(p);
      RecordWriter writer = HiveFileFormatUtils.getRecordWriter(
          jc, hiveOutputFormat, outputClass, isCompressed, tableInfo.getProperties(), path);
      writer.close(false);
      LOG.info("created empty bucket for enforcing bucketing at " + path);
    }
  }


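Here a new path is registered in the work's pathToPartitionInfo map, an empty file is written for it via getHiveRecordWriter, and the path is added as a job input, apparently so that an otherwise empty partition still contributes an input split.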
      pathToPartitionInfo.put(newPath.toUri().toString(), pDesc);
    }
    work.setPathToPartitionInfo(pathToPartitionInfo);

    String onefile = newPath.toString();
    RecordWriter recWriter = outFileFormat.newInstance().getHiveRecordWriter(job, newFilePath,
        Text.class, false, props, null);
    recWriter.close(false);
    FileInputFormat.addInputPaths(job, onefile);
    return numEmptyPaths;
  }

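An HBase-backed writer: the HTable is opened with auto-flush disabled so puts are buffered client-side, and the returned RecordWriter flushes the buffered commits in close only when the job is not aborting.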
    final boolean walEnabled = HiveConf.getBoolVar(
        jc, HiveConf.ConfVars.HIVE_HBASE_WAL_ENABLED);
    final HTable table = new HTable(HBaseConfiguration.create(jc), hbaseTableName);
    table.setAutoFlush(false);

    return new RecordWriter() {

      @Override
      public void close(boolean abort) throws IOException {
        if (!abort) {
          table.flushCommits();
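
The fragment breaks off inside close(), and the matching write method is not shown. In HBase handlers of this vintage it would typically cast the incoming Writable to a Put, apply the WAL setting, and buffer it on the table. A hedged sketch of that shape, not the snippet's own code:

      @Override
      public void write(Writable w) throws IOException {
        Put put = (Put) w;              // assumes the sink hands over HBase Put objects
        put.setWriteToWAL(walEnabled);  // honor HIVE_HBASE_WAL_ENABLED
        table.put(put);                 // buffered until flushCommits() in close()
      }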

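A SequenceFile writer keyed by HiveKey with NullWritable values: each incoming record is copied into the reusable keyWritable, from a Text when the configured value class is Text (the fragment is truncated inside write).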
    final SequenceFile.Writer outStream = Utilities.createSequenceWriter(jc,
        fs, finalOutPath, HiveKey.class, NullWritable.class, isCompressed);

    keyWritable = new HiveKey();
    keyIsText = valueClass.equals(Text.class);
    return new RecordWriter() {
      public void write(Writable r) throws IOException {
        if (keyIsText) {
          Text text = (Text) r;
          keyWritable.set(text.getBytes(), 0, text.getLength());
        } else {

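A simpler SequenceFile writer that appends every record under a shared EMPTY_KEY, so only the values carry data.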
    FileSystem fs = finalOutPath.getFileSystem(jc);
    final SequenceFile.Writer outStream = Utilities.createSequenceWriter(jc,
        fs, finalOutPath, BytesWritable.class, valueClass, isCompressed);

    return new RecordWriter() {
      public void write(Writable r) throws IOException {
        outStream.append(EMPTY_KEY, r);
      }

      public void close(boolean abort) throws IOException {
        outStream.close();   // release the underlying SequenceFile writer
      }
    };
  }


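A wrapping output format, apparently from a test: it obtains the parent's writer, then returns a RecordWriter that ROT13-transforms each Text or BytesWritable record in place before delegating to the wrapped writer, and forwards close along with the abort flag.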
                        Path outPath,
                        Class<? extends Writable> valueClass,
                        boolean isCompressed,
                        Properties tableProperties,
                        Progressable progress) throws IOException {
    final RecordWriter result =
      super.getHiveRecordWriter(jc,outPath,valueClass,isCompressed,
        tableProperties,progress);
    final Reporter reporter = (Reporter) progress;
    reporter.setStatus("got here");
    System.out.println("Got a reporter " + reporter);
    return new RecordWriter() {
      @Override
      public void write(Writable w) throws IOException {
        if (w instanceof Text) {
          Text value = (Text) w;
          Rot13InputFormat.rot13(value.getBytes(), 0, value.getLength());
          result.write(w);
        } else if (w instanceof BytesWritable) {
          BytesWritable value = (BytesWritable) w;
          Rot13InputFormat.rot13(value.getBytes(), 0, value.getLength());
          result.write(w);
        } else {
          throw new IllegalArgumentException("need text or bytes writable" +
            " instead of " + w.getClass().getName());
        }
      }

      @Override
      public void close(boolean abort) throws IOException {
        result.close(abort);
      }
    };
  }
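
This wrap-and-delegate shape is the usual way to layer a per-record transformation over an existing Hive output format: the inner writer owns the file, while the outer RecordWriter only rewrites bytes before passing each record through.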

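A bare-bones writer over a stream opened directly on the FileSystem, copying the raw bytes of each Text record to the file (truncated before the remaining branches and close).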
      Properties tableProperties, Progressable progress) throws IOException {

    FileSystem fs = outPath.getFileSystem(jc);
    final OutputStream outStream = fs.create(outPath);

    return new RecordWriter() {
      @Override
      public void write(Writable r) throws IOException {
        if (r instanceof Text) {
          Text tr = (Text) r;
          outStream.write(tr.getBytes(), 0, tr.getLength());

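The same HiveKey/NullWritable SequenceFile writer as above, in a variant that adds @Override annotations; it is likewise truncated inside write.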
    final SequenceFile.Writer outStream = Utilities.createSequenceWriter(jc,
        fs, finalOutPath, HiveKey.class, NullWritable.class, isCompressed);

    keyWritable = new HiveKey();
    keyIsText = valueClass.equals(Text.class);
    return new RecordWriter() {
      @Override
      public void write(Writable r) throws IOException {
        if (keyIsText) {
          Text text = (Text) r;
          keyWritable.set(text.getBytes(), 0, text.getLength());

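Likewise, a variant of the EMPTY_KEY SequenceFile writer with @Override annotations, breaking off before the close(boolean) shown earlier.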
    FileSystem fs = finalOutPath.getFileSystem(jc);
    final SequenceFile.Writer outStream = Utilities.createSequenceWriter(jc,
        fs, finalOutPath, BytesWritable.class, valueClass, isCompressed);

    return new RecordWriter() {
      @Override
      public void write(Writable r) throws IOException {
        outStream.append(EMPTY_KEY, r);
      }
