Package: org.apache.avro.generic

Usage examples of org.apache.avro.generic.GenericRecord


  }
 
 
  public GenericRecord generateRecord() throws UnknownTypeException
  {
    GenericRecord subRecord = new GenericData.Record(field.schema());
    for(Field field : this.field.schema().getFields())
    {
      SchemaFiller fill;
      fill = SchemaFiller.createRandomField(field);
      fill.writeToRecord(subRecord);
View Full Code Here


    {
      LOG.error("The schema first level must be record.");
      return null;
    }

    GenericRecord record = new GenericData.Record(schema);
    for(Field field : schema.getFields() )
    {
      SchemaFiller schemaFill = SchemaFiller.createRandomField(field);
      schemaFill.writeToRecord(record);
    }
View Full Code Here

    //get string length and check if min is greater than 0

    // Generate the record
    File schemaFile = new File(fileLoc);
    DataGenerator dataGenerator = new DataGenerator(schemaFile);
    GenericRecord record = dataGenerator.generateRandomRecord();
    if (cmd.hasOption(PRINT_AVRO_JSON_OPTNAME))
    {
      String outname = cmd.getOptionValue(PRINT_AVRO_JSON_OPTNAME);
      OutputStream outs = System.out;
      if (!outname.equals("-"))
View Full Code Here

      if(name.startsWith(prefix)) {
        FSDataInputStream input = fs.open(new Path(outputFile));
        DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>();
        DataFileStream<GenericRecord> avroStream =
            new DataFileStream<GenericRecord>(input, reader);
        GenericRecord record = new GenericData.Record(avroStream.getSchema());
        while (avroStream.hasNext()) {
          avroStream.next(record);
          ByteBuffer body = (ByteBuffer) record.get("body");
          CharsetDecoder decoder = Charsets.UTF_8.newDecoder();
          String bodyStr = decoder.decode(body).toString();
          LOG.debug("Removing event: {}", bodyStr);
          bodies.remove(bodyStr);
          found++;
View Full Code Here

      }
    }

    @Override
    public GenericRecord map(Tuple input) {
      GenericRecord record = createRecord();
      for (int i = 0; i < input.size(); i++) {
        Object v = input.get(i);
        if (v == null) {
          record.put(i, null);
        } else {
          record.put(i, fns.get(i).map(v));
        }
      }
      return record;
    }
View Full Code Here

      }

      // If we get any GenericRecord types, convert them to our own specific types
      // if we can figure out which to use. This makes toString'ing prettier.
      if (fieldVal instanceof GenericRecord) {
        GenericRecord record = (GenericRecord) fieldVal;
        if (record.getSchema().equals(TimestampBase.SCHEMA$)) {
          fieldVal = new Timestamp((Long) record.get("milliseconds"),
              (Long) record.get("nanos"));
        }
      }

      if (null == fieldVal) {
        sb.append("null");
View Full Code Here

    }
  }

  private GenericRecord toGenericRecord(SqoopRecord val) {
    Map<String, Object> fieldMap = val.getFieldMap();
    GenericRecord record = new GenericData.Record(schema);
    for (Map.Entry<String, Object> entry : fieldMap.entrySet()) {
      record.put(entry.getKey(), toAvro(entry.getValue()));
    }
    return record;
  }
View Full Code Here

    runImport(getArgv());

    Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
    DataFileReader<GenericRecord> reader = read(outputFile);
    GenericRecord record = reader.next();

    // Verify that blob data is imported as Avro bytes.
    ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
    String returnVal = new String(buf.array());

    assertEquals(getColName(0), expectedVal, returnVal);
  }
View Full Code Here

    // written to an external file.
    runImport(getArgv("--inline-lob-limit", "1"));

    Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
    DataFileReader<GenericRecord> reader = read(outputFile);
    GenericRecord record = reader.next();

    // Verify that the reference file is written in Avro bytes.
    ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
    String returnVal = new String(buf.array());
    String expectedStart = "externalLob(lf,_lob/large_obj";
    String expectedEnd = getTableNum() + "_m_0000000.lob,68,"
      + data.length() + ")";
View Full Code Here

    runImport(getArgv("--compression-codec", CodecMap.DEFLATE));

    Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
    DataFileReader<GenericRecord> reader = read(outputFile);
    GenericRecord record = reader.next();

    // Verify that the data block of the Avro file is compressed with deflate
    // codec.
    assertEquals(CodecMap.DEFLATE,
        reader.getMetaString(DataFileConstants.CODEC));

    // Verify that all columns are imported correctly.
    ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
    String returnVal = new String(buf.array());

    assertEquals(getColName(0), expectedVal, returnVal);
  }
View Full Code Here

TOP

Related Classes of org.apache.avro.generic.GenericRecord

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.