Package org.apache.avro.file

Examples of org.apache.avro.file.DataFileWriter$BufferedFileOutputStream
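
DataFileWriter.BufferedFileOutputStream is a private inner class that DataFileWriter wraps around the OutputStream (or File) passed to create(), so every example below that creates a container file and appends records exercises it indirectly. As a minimal, self-contained sketch of that write lifecycle (the DataFileWriterSketch class, the User schema, and the users.avro file name are invented for illustration):

    import java.io.File;
    import java.io.IOException;

    import org.apache.avro.Schema;
    import org.apache.avro.file.CodecFactory;
    import org.apache.avro.file.DataFileWriter;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.generic.GenericRecord;

    public class DataFileWriterSketch {
      public static void main(String[] args) throws IOException {
        // Hypothetical record schema with a single string field.
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"User\",\"fields\":"
            + "[{\"name\":\"name\",\"type\":\"string\"}]}");

        GenericRecord record = new GenericData.Record(schema);
        record.put("name", "example");

        DataFileWriter<GenericRecord> writer =
            new DataFileWriter<>(new GenericDatumWriter<GenericRecord>(schema));
        writer.setCodec(CodecFactory.deflateCodec(CodecFactory.DEFAULT_DEFLATE_LEVEL));

        // create() writes the container-file header; the target stream is
        // buffered internally by DataFileWriter before blocks are written.
        writer.create(schema, new File("users.avro"));
        writer.append(record);
        writer.close();
      }
    }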


    // Write an Avro-typed record collection to an OutputStream.
    AvroType avroType = (AvroType) recordCollection.getPType();
    if (avroType == null) {
      throw new IllegalStateException("Can't write a non-typed Avro collection");
    }
    DatumWriter datumWriter = Avros.newWriter(avroType);
    DataFileWriter dataFileWriter = new DataFileWriter(datumWriter);
    dataFileWriter.create(avroType.getSchema(), outputStream);

    // Map each element to its Avro representation before appending it.
    for (Object record : recordCollection.materialize()) {
      dataFileWriter.append(avroType.getOutputMapFn().map(record));
    }

    dataFileWriter.close();
    outputStream.close();
  }


                    String name, Progressable prog)
    throws IOException {

    Schema schema = AvroJob.getOutputSchema(job);

    final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter());

    // Honor the job's compression settings by configuring a deflate codec.
    if (FileOutputFormat.getCompressOutput(job)) {
      int level = job.getInt(AvroOutputFormat.DEFLATE_LEVEL_KEY,
                             CodecFactory.DEFAULT_DEFLATE_LEVEL);
      writer.setCodec(CodecFactory.deflateCodec(level));
    }

    // Create the container file at the task's output path on the job's file system.
    Path path =
      FileOutputFormat.getTaskOutputPath(job, name + AvroOutputFormat.EXT);
    writer.create(schema, path.getFileSystem(job).create(path));

    return new RecordWriter<TetherData, NullWritable>() {
        public void write(TetherData datum, NullWritable ignore)
          throws IOException {
          // The datum arrives already Avro-encoded, so append its raw bytes.
          writer.appendEncoded(datum.buffer());
        }
        public void close(Reporter reporter) throws IOException {
          writer.close();
        }
      };
  }

      // Derive the schema by reflection from the record's class.
      schema = new ReflectDataFactory().getReflectData().getSchema(r.getClass());
    }

    GenericDatumWriter genericDatumWriter = new GenericDatumWriter(schema);

    DataFileWriter dataFileWriter = new DataFileWriter(genericDatumWriter);
    dataFileWriter.create(schema, outputStream);

    for (Object record : genericRecords) {
      dataFileWriter.append(record);
    }

    dataFileWriter.close();
    outputStream.close();
  }


 
  private void ingestAndVerifyAvro(Schema schema, GenericData.Record... records) throws IOException {
    deleteAllDocuments();

    // Write the records, plus custom file metadata, into an in-memory Avro container file.
    GenericDatumWriter datumWriter = new GenericDatumWriter(schema);
    DataFileWriter writer = new DataFileWriter(datumWriter);
    writer.setMeta("Meta-Key0", "Meta-Value0");
    writer.setMeta("Meta-Key1", "Meta-Value1");
    ByteArrayOutputStream bout = new ByteArrayOutputStream();
    writer.create(schema, bout);
    for (GenericData.Record record : records) {
      writer.append(record);
    }
    writer.flush();
    writer.close();

    // Read the container file back and check that the embedded schema round-trips.
    DataFileReader<GenericData.Record> reader = new DataFileReader(
        new ReadAvroContainerBuilder.ForwardOnlySeekableInputStream(
            new ByteArrayInputStream(bout.toByteArray())),
        new GenericDatumReader());
    Schema schema2 = reader.getSchema();
    assertEquals(schema, schema2);
    for (GenericData.Record record : records) {

      // Pass the record to the next command in the chain.
      return super.doProcess(outputRecord);
    }

    @SuppressWarnings("unchecked")
    private void writeContainer(Record src, OutputStream dst) {
      DataFileWriter dataFileWriter = null;
      try {
        try {
          Schema schema = null;
          for (Object attachment : src.get(Fields.ATTACHMENT_BODY)) {
            Preconditions.checkNotNull(attachment);
            GenericContainer datum = (GenericContainer) attachment;
            schema = getSchema(datum, schema);
            assert schema != null;
            if (dataFileWriter == null) { // lazily initialize once the first schema is known
              GenericDatumWriter datumWriter = new GenericDatumWriter(schema);
              dataFileWriter = new DataFileWriter(datumWriter);
              if (codecFactory != null) {
                dataFileWriter.setCodec(codecFactory);
              }
              for (Map.Entry<String,String> entry : metadata.entrySet()) {
                dataFileWriter.setMeta(entry.getKey(), entry.getValue());
              }
              dataFileWriter.create(schema, dst);
            }
            dataFileWriter.append(datum);
          }
          if (dataFileWriter != null) {
            dataFileWriter.flush();
          }
        } catch (IOException e) {
          throw new MorphlineRuntimeException(e);
        }
      } finally {
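
Several of the examples above close the writer by hand or flush it inside a finally block. Since DataFileWriter implements Closeable, the same write path can also be expressed with try-with-resources; a brief sketch of that variant (the AvroContainerWriteHelper class and writeAll method below are hypothetical, not taken from the examples):

    import java.io.IOException;
    import java.io.OutputStream;

    import org.apache.avro.Schema;
    import org.apache.avro.file.DataFileWriter;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.generic.GenericRecord;

    // Hypothetical helper, not taken from the examples above.
    public class AvroContainerWriteHelper {
      // try-with-resources ensures the writer, and with it the buffered
      // output stream, is flushed and closed even if append() throws.
      static void writeAll(Schema schema, Iterable<GenericRecord> records,
                           OutputStream out) throws IOException {
        try (DataFileWriter<GenericRecord> writer =
                 new DataFileWriter<>(new GenericDatumWriter<GenericRecord>(schema))) {
          writer.create(schema, out);
          for (GenericRecord record : records) {
            writer.append(record);
          }
        }
      }
    }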

