Examples of RecordWriter


Examples of com.netflix.astyanax.util.RecordWriter

                .append("3, arielle, landau, 6, c\n")
                .append("4, eyal, landau, 2, d\n");

        RecordReader reader = new CsvRecordReader(new StringReader(
                sb.toString()));
        RecordWriter writer = new ColumnarRecordWriter(keyspace,
                CF_USERS.getName());

        try {
            reader.start();
            writer.start();
            List<Pair<String, String>> record = null;
            while (null != (record = reader.next())) {
                writer.write(record);
            }
        } catch (IOException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        } finally {
            reader.shutdown();
            writer.shutdown();
        }

        try {
            Rows<Long, String> rows = keyspace.prepareQuery(CF_USERS)
                    .getAllRows().execute().getResult();
            // ... remaining assertions elided ...
        } catch (ConnectionException e) {
            LOG.error(e.getMessage(), e);
            Assert.fail();
        }
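The whole read loop condenses into a small helper. Below is a minimal sketch of the same flow, assuming the same Astyanax imports as the excerpt above; loadCsv and its parameters are hypothetical names for illustration:

        // Minimal sketch: pump every CSV record into a column family.
        // Assumes the same imports as the excerpt; keyspace and
        // columnFamilyName are supplied by the caller (hypothetical).
        static void loadCsv(Keyspace keyspace, String columnFamilyName, String csv)
                throws IOException, ConnectionException {
            RecordReader reader = new CsvRecordReader(new StringReader(csv));
            RecordWriter writer = new ColumnarRecordWriter(keyspace, columnFamilyName);
            try {
                reader.start();                            // open the CSV source
                writer.start();                            // prepare the writer
                List<Pair<String, String>> record;
                while ((record = reader.next()) != null) { // null marks end of input
                    writer.write(record);                  // one mutation per CSV row
                }
            } finally {
                reader.shutdown();                         // always release both ends
                writer.shutdown();
            }
        }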

Examples of org.apache.drill.exec.store.RecordWriter

    options.put("separator", ((TextFormatConfig)getConfig()).getDelimiter());
    options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig)writer.getStorageConfig()).connection);

    options.put("extension", ((TextFormatConfig)getConfig()).getExtensions().get(0));

    RecordWriter recordWriter = new DrillTextRecordWriter(context.getAllocator());
    recordWriter.init(options);

    return recordWriter;
  }
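The options map here is plain string configuration that the writer consumes in init(). A hedged sketch of the same wiring, with hypothetical literal values standing in for the TextFormatConfig and FileSystemConfig lookups (Map/HashMap from java.util, FileSystem from org.apache.hadoop.fs as in the excerpt):

    // Sketch only: literals stand in for the config lookups above.
    Map<String, String> options = new HashMap<String, String>();
    options.put("separator", ",");                            // getDelimiter()
    options.put(FileSystem.FS_DEFAULT_NAME_KEY, "file:///");  // storage connection
    options.put("extension", "csv");                          // getExtensions().get(0)

    RecordWriter recordWriter = new DrillTextRecordWriter(context.getAllocator());
    recordWriter.init(options);   // the writer reads all of its settings from the map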

Examples of org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter

      throw new HiveException(e);
    }

    for (String p : paths) {
      Path path = new Path(p);
      RecordWriter writer = HiveFileFormatUtils.getRecordWriter(
          jc, hiveOutputFormat, outputClass, isCompressed,
          tableInfo.getProperties(), path, reporter);
      writer.close(false);
      LOG.info("created empty bucket for enforcing bucketing at " + path);
    }
  }
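The pattern in this excerpt is: obtain a writer for each bucket path and close it immediately, which commits a zero-row file. A sketch of that idea for a single path, reusing the excerpt's variables (the path literal is hypothetical):

      // Create-and-close materializes an empty bucket file; close(false)
      // means "commit", while close(true) would abort and discard the output.
      Path bucketPath = new Path("/warehouse/t/bucket_00000");   // hypothetical
      RecordWriter writer = HiveFileFormatUtils.getRecordWriter(
          jc, hiveOutputFormat, outputClass, isCompressed,
          tableInfo.getProperties(), bucketPath, reporter);
      writer.close(false);   // no write() calls, so the file stays empty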

Examples of org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter

    newPath = fs.makeQualified(newPath);
    String newFile = newDir + File.separator + "emptyFile";
    Path newFilePath = new Path(newFile);

    String onefile = newPath.toString();
    RecordWriter recWriter = outFileFormat.newInstance().getHiveRecordWriter(job, newFilePath,
        Text.class, false, props, null);
    if (dummyRow) {
      // Empty files are omitted by CombineHiveInputFormat; for a metadata-only
      // query this effectively makes partition columns disappear. That could be
      // fixed in other ways, but this seemed the easiest (HIVE-2955).
      recWriter.write(new Text("empty")); // written via HiveIgnoreKeyTextOutputFormat
    }
    recWriter.close(false);

    return newPath;
  }
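Stripped to its essentials, the dummy-row trick is: create the writer, emit exactly one placeholder row so CombineHiveInputFormat keeps the file, then commit. A minimal sketch reusing the excerpt's names (the output path literal is hypothetical):

    // Sketch: one placeholder row keeps an otherwise-empty partition visible
    // to metadata-only queries (see the HIVE-2955 note in the comment above).
    Path placeholder = new Path("/tmp/emptyFile");            // hypothetical path
    RecordWriter w = outFileFormat.newInstance().getHiveRecordWriter(
        job, placeholder, Text.class, false, props, null);
    w.write(new Text("empty"));   // the single dummy row
    w.close(false);               // false = commit, not abort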

Examples of org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter

                        Path outPath,
                        Class<? extends Writable> valueClass,
                        boolean isCompressed,
                        Properties tableProperties,
                        Progressable progress) throws IOException {
    final RecordWriter result =
      super.getHiveRecordWriter(jc, outPath, valueClass, isCompressed,
        tableProperties, progress);
    final Reporter reporter = (Reporter) progress;
    reporter.setStatus("got here");
    System.out.println("Got a reporter " + reporter);
    return new RecordWriter() {
      @Override
      public void write(Writable w) throws IOException {
        if (w instanceof Text) {
          Text value = (Text) w;
          Rot13InputFormat.rot13(value.getBytes(), 0, value.getLength());
          result.write(w);
        } else if (w instanceof BytesWritable) {
          BytesWritable value = (BytesWritable) w;
          Rot13InputFormat.rot13(value.getBytes(), 0, value.getLength());
          result.write(w);
        } else {
          throw new IllegalArgumentException("need Text or BytesWritable " +
            "instead of " + w.getClass().getName());
        }
      }

      @Override
      public void close(boolean abort) throws IOException {
        result.close(abort);
      }
    };
  }
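The anonymous class above is a classic decorator: intercept write(), transform the payload, and delegate everything else. A generic sketch of the same shape (wrapWriter and transform are hypothetical names; UnaryOperator is java.util.function, so this assumes Java 8+, and non-Text records simply pass through here rather than throwing):

      // Hedged sketch: decorate any FileSinkOperator.RecordWriter with a
      // per-record Text transformation before delegating.
      static RecordWriter wrapWriter(final RecordWriter delegate,
                                     final UnaryOperator<Text> transform) {
        return new RecordWriter() {
          @Override
          public void write(Writable w) throws IOException {
            if (w instanceof Text) {
              delegate.write(transform.apply((Text) w));   // transformed record
            } else {
              delegate.write(w);                           // pass through untouched
            }
          }

          @Override
          public void close(boolean abort) throws IOException {
            delegate.close(abort);   // always propagate close/abort to the delegate
          }
        };
      }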

Examples of org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter

      pathToPartitionInfo.put(newPath.toUri().toString(), pDesc);
    }
    work.setPathToPartitionInfo(pathToPartitionInfo);

    String onefile = newPath.toString();
    RecordWriter recWriter = outFileFormat.newInstance().getHiveRecordWriter(job, newFilePath,
        Text.class, false, props, null);
    recWriter.close(false);
    FileInputFormat.addInputPaths(job, onefile);
    return numEmptyPaths;
  }
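The distinguishing step in this variant is the final call: after committing the zero-row file, its directory is registered as a job input so downstream operators still see the (empty) partition. In outline, with the excerpt's names:

    // Sketch: materialize an empty file, then register it as a job input.
    RecordWriter recWriter = outFileFormat.newInstance().getHiveRecordWriter(
        job, newFilePath, Text.class, false, props, null);
    recWriter.close(false);                                 // commit the zero-row file
    FileInputFormat.addInputPaths(job, newPath.toString()); // make it a job input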

Examples of org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter

      throw new HiveException(e);
    }

    for (String p : paths) {
      Path path = new Path(p);
      RecordWriter writer = HiveFileFormatUtils.getRecordWriter(
          jc, hiveOutputFormat, outputClass, isCompressed, tableInfo.getProperties(), path);
      writer.close(false);
      LOG.info("created empty bucket for enforcing bucketing at " + path);
    }
  }

Examples of org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter

      pathToPartitionInfo.put(newPath.toUri().toString(), pDesc);
    }
    work.setPathToPartitionInfo(pathToPartitionInfo);

    String onefile = newPath.toString();
    RecordWriter recWriter = outFileFormat.newInstance().getHiveRecordWriter(job, newFilePath,
        Text.class, false, new Properties(), null);
    recWriter.close(false);
    FileInputFormat.addInputPaths(job, onefile);
    return numEmptyPaths;
  }

Examples of org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter

      pathToPartitionInfo.put(newPath.toUri().toString(), pDesc);
    }
    work.setPathToPartitionInfo(pathToPartitionInfo);

    String onefile = newPath.toString();
    RecordWriter recWriter = outFileFormat.newInstance().getHiveRecordWriter(job, newFilePath,
        Text.class, false, props, null);
    if (oneRow) {
      // Empty files are omitted by CombineHiveInputFormat; for a metadata-only
      // query this effectively makes partition columns disappear. That could be
      // fixed in other ways, but this seemed the easiest (HIVE-2955).
      recWriter.write(new Text("empty")); // written via HiveIgnoreKeyTextOutputFormat
    }
    recWriter.close(false);
    FileInputFormat.addInputPaths(job, onefile);
    return numEmptyPaths;
  }
