Package org.apache.avro

Examples of org.apache.avro.Schema
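The snippets below come from several open-source projects (LinkedIn Databus, Apache Flume, the Kite SDK, among others) and illustrate common uses of org.apache.avro.Schema: parsing a schema from its JSON definition, inspecting field types, and building record schemas programmatically. As a minimal warm-up, here is a hypothetical, self-contained example; the schema JSON is invented for illustration and is not taken from the snippets:

import org.apache.avro.Schema;

public class SchemaParseExample
{
  public static void main(String[] args)
  {
    // A made-up record schema in Avro's JSON format.
    String json = "{\"type\":\"record\",\"name\":\"source2_v1\",\"namespace\":\"test4\","
                + "\"fields\":[{\"name\":\"id\",\"type\":\"long\"},"
                + "{\"name\":\"value\",\"type\":[\"null\",\"string\"],\"default\":null}]}";

    // Schema.parse(String) is the entry point used throughout the snippets below;
    // newer Avro releases prefer new Schema.Parser().parse(json).
    Schema schema = Schema.parse(json);

    System.out.println(schema.getFullName());                         // test4.source2_v1
    System.out.println(schema.getFields().size());                    // 2
    System.out.println(schema.getField("value").schema().getType());  // UNION
  }
}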


               obj.get("version") instanceof Integer);
    RegisterResponseEntry rre = new RegisterResponseEntry((Integer)obj.get("id"),
                                                          ((Integer)obj.get("version")).shortValue(),
                                                          (String)obj.get("schema"));
    assertEquals("unexpected source id", 4002, rre.getId());
    Schema resSchema = Schema.parse(rre.getSchema());
    assertEquals("unexpected source-schema name for source id 4002", "test4.source2_v1", resSchema.getFullName());

    // There's no guarantee of a metadataSchemas entry in general, but we pre-stuffed our
    // VersionedSchemaSetBackedRegistryService with one in the test's constructor, so we
    // expect the relay to hand it back to us.  Or else.
/* disabled for now since simplistic relay implementation has been disabled; reenable/update/modify as part of DDSDBUS-2093/2096 (TODO)


    }
    assertNotNull("missing source schemas in response", sourceSchemasList);
    assertEquals("expected one source schema", 1, sourceSchemasList.size());
    RegisterResponseEntry rre = sourceSchemasList.get(0);
    assertEquals("unexpected source id", 4002, rre.getId());
    Schema resSchema = Schema.parse(rre.getSchema());
    assertEquals("unexpected source-schema name for source id 4002", "test4.source2_v1", resSchema.getFullName());

    // protocolVersion == 3:  as with v2 above; just do a quick sanity check
    httpRequest =
        new DefaultHttpRequest(HttpVersion.HTTP_1_1,
                               HttpMethod.GET,

    if(!_areFieldsFiltered)
    {
      return schema;
    }

    Schema avroSchema = Schema.parse(schema);
    ObjectMapper mapper = new ObjectMapper();
    JsonFactory factory = new JsonFactory();
    StringWriter writer = new StringWriter();
    JsonGenerator jgen = factory.createJsonGenerator(writer);
    jgen.useDefaultPrettyPrinter();

    @SuppressWarnings("unchecked")
    HashMap<String,Object> schemaMap = mapper.readValue(schema, HashMap.class);

    @SuppressWarnings("unchecked")
    ArrayList<HashMap<String,String>> list = (ArrayList<HashMap<String, String>>) schemaMap.get("fields");

    int i=0;
    while(i < list.size())
    {
      Schema.Field field = avroSchema.getField(list.get(i).get("name"));
      String dbFieldName;

      if(field.schema().getType() == Schema.Type.ARRAY)
      {
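Aside: the Jackson round-trip above re-reads the schema JSON to get at the field list, but the already-parsed Schema exposes the same information directly. A small sketch (not from the original project) of the equivalent lookup:

// Sketch: list field names and types straight from the parsed Schema,
// without re-parsing the JSON definition.
static void printFields(Schema avroSchema)
{
  for (Schema.Field field : avroSchema.getFields())
  {
    System.out.println(field.name() + " -> " + field.schema().getType());
  }
}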

      System.err.println("Input schema expected");
      cli.printUsage();
      System.exit(4);
    }

    Schema inputSchema = null;
    try
    {
      inputSchema = openSchema(inputSchemaName);
    }
    catch (IOException ioe)
    {
      System.err.println("Unable to open input schema: " + ioe);
      System.exit(2);
    }
    LOG.info("Using input schema:" + inputSchemaName);

    String outputSchemaName = cli.getOutputSchema(inputSchemaName);
    Schema outputSchema = null;
    try
    {
      outputSchema = outputSchemaName.equals(inputSchemaName) ? inputSchema : openSchema(outputSchemaName);
    }
    catch (IOException ioe)
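openSchema is not shown in this excerpt; a plausible sketch, assuming the schema name is the path to an .avsc file on disk (only the method name and the thrown IOException come from the snippet, the body is an assumption):

// Hypothetical helper: load an Avro schema definition from a local file.
// Requires java.io.File and java.io.IOException.
private static Schema openSchema(String schemaFileName) throws IOException
{
  return Schema.parse(new File(schemaFileName));
}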

      return false;
    }

    try
    {
      Schema fieldSchema = SchemaHelper.unwindUnionSchema(f);  // == f.schema() if f is not a union
      Type avroFieldType = fieldSchema.getType();

      if (_sDebug)
      {
        LOG.debug("Checking for type:" + avroFieldType + ", Field:" + f.name() +
                  ", Exp:" + databaseFieldValue + ", Got:" + avroField);
      }
      switch (avroFieldType)
      {
        case BOOLEAN:
          assertEquals(f.name(), databaseFieldValue, avroField);
          break;
        case BYTES:
          byte[] byteArr = null;
          if (databaseFieldValue instanceof Blob)
          {
            Blob b = (Blob) databaseFieldValue;
            byteArr = b.getBytes(1,(int) b.length());
          }
          else
          {
            byteArr = (byte[])databaseFieldValue;
          }
          assertEquals(f.name(), byteArr, avroField);
          break;
        case DOUBLE:
          assertEquals(f.name(), Double.valueOf(((Number)databaseFieldValue).doubleValue()), avroField);
          break;
        case FLOAT:
          assertEquals(f.name(), Float.valueOf(((Number)databaseFieldValue).floatValue()), avroField);
          break;
        case INT:
          assertEquals(f.name(), Integer.valueOf(((Number)databaseFieldValue).intValue()), avroField);
          break;
        case LONG:
          if(databaseFieldValue instanceof Number)
          {
              long lvalue = ((Number) databaseFieldValue).longValue();
              assertEquals(f.name(),lvalue,((Long)avroField).longValue());
          }
          else if(databaseFieldValue instanceof Timestamp)
          {
              long time = ((Timestamp) databaseFieldValue).getTime();
              assertEquals(f.name(),time,((Long)avroField).longValue());
          }
          else if(databaseFieldValue instanceof Date)
          {
              long time = ((Date) databaseFieldValue).getTime();
              assertEquals(f.name(),time,((Long)avroField).longValue());
          }
          else
          {
            Class<?> timestampClass = null, dateClass = null;
            try
            {
              timestampClass = OracleJarUtils.loadClass("oracle.sql.TIMESTAMP");
              dateClass = OracleJarUtils.loadClass("oracle.sql.DATE");
            }
            catch (Exception e)
            {
              String errMsg = "Cannot convert " + databaseFieldValue.getClass() +
                              " to long. Unable to get Oracle datatypes " + e.getMessage();
              LOG.error(errMsg);
              throw new EventCreationException(errMsg);
            }

            if (timestampClass.isInstance(databaseFieldValue))
            {
              try
              {
                Object tsc = timestampClass.cast(databaseFieldValue);
                Method dateValueMethod = timestampClass.getMethod("dateValue");
                Date dateValue = (Date) dateValueMethod.invoke(tsc);
                long time = dateValue.getTime();
                assertEquals(f.name(),time,((Long)avroField).longValue());
              }
              catch(Exception ex)
              {
                String errMsg = "SQLException reading oracle.sql.TIMESTAMP value for field " + f.name();
                LOG.error(errMsg);
                throw new RuntimeException(errMsg, ex);
              }
            }
            else if (dateClass.isInstance(databaseFieldValue))
            {
              try
              {
                Object dsc = dateClass.cast(databaseFieldValue);
                Method dateValueMethod = dateClass.getMethod("dateValue");
                Date dateValue = (Date) dateValueMethod.invoke(dsc);
                long time = dateValue.getTime();
                assertEquals(f.name(),time,((Long)avroField).longValue());
              }
              catch (Exception ex)
              {
                String errMsg = "SQLException reading oracle.sql.DATE value for field " + f.name();
                LOG.error(errMsg);
                throw new RuntimeException(errMsg, ex);
              }
            }
            else
            {
              String errMsg = "Cannot convert " + databaseFieldValue.getClass() + " to long for field " + f.name();
              LOG.error(errMsg);
              throw new RuntimeException(errMsg);
            }
          }
          break;
        case STRING:
          if (databaseFieldValue instanceof Clob)
          {
            String text = null;

            try
            {
              text = OracleAvroGenericEventFactory.extractClobText((Clob)databaseFieldValue, f.name());
            }
            catch (EventCreationException ex)
            {
              LOG.error("compareField error: " + ex.getMessage(), ex);
            }
            assertEquals(f.name(), text, ((Utf8)avroField).toString());
          }
          else
          {
            String text = databaseFieldValue.toString();
            assertEquals(f.name(), text, ((Utf8)avroField).toString());
          }
          break;
        case NULL:
          assertNull(f.name(), databaseFieldValue);
          assertNull(f.name(), avroField);
          break;
        case ARRAY:
          GenericArray<GenericRecord> avroArray = (GenericArray<GenericRecord>)avroField;
          Schema elementSchema = fieldSchema.getElementType();
          Array array = (Array)databaseFieldValue;
          ResultSet arrayResultSet = array.getResultSet();
          int i = 0;

          while (arrayResultSet.next())
          {
            // Get the underlying structure from the database. Oracle returns the structure in the
            // second column of the array's ResultSet
            Struct struct = (Struct) arrayResultSet.getObject(2);
            Object[] attributes = struct.getAttributes();

            GenericRecord avroElement = avroArray.get(i++);

            // Iterate over the fields in the JSON array of fields.
            // We can read the structure elements only by position, not by field name, so we
            // have to use dbFieldPosition recorded in the schema definition.
            for (Field field : elementSchema.getFields())
            {
              int dbFieldPosition = Integer.valueOf(SchemaHelper.getMetaField(field, "dbFieldPosition"));
              Object dbFieldValue = attributes[dbFieldPosition];
              Object avroFieldValue = avroElement.get(field.name());
              compareField(field, dbFieldValue, avroFieldValue);
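SchemaHelper.unwindUnionSchema is a project helper; the in-code comments (at its call site above and in the next snippet) describe its contract as "f.schema() if f is not a union, else the schema of the first non-null branch". A minimal sketch of that described behavior (an assumption, not the project's actual implementation):

// Sketch: return the field's schema, or for a union the first non-null branch.
public static Schema unwindUnionSchema(Schema.Field field)
{
  Schema schema = field.schema();
  if (schema.getType() != Schema.Type.UNION)
  {
    return schema;
  }
  for (Schema branch : schema.getTypes())
  {
    if (branch.getType() != Schema.Type.NULL)
    {
      return branch;
    }
  }
  return schema;  // union containing only null; fall back to the union itself
}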

    boolean result = true;
    for (Field avroField : fields)
    {
      int dbFieldPosition = 0;
      // this is just avroField.schema() if avroField isn't a union; else schema of first non-null subtype:
      Schema fieldSchema = SchemaHelper.unwindUnionSchema(avroField);
      Type avroFieldType = fieldSchema.getType();

      String dbFieldPositionStr = SchemaHelper.getMetaField(avroField, "dbFieldPosition");
      if (avroFieldType == Type.ARRAY)
      {
        if (null == dbFieldPositionStr || dbFieldPositionStr.isEmpty())
        {
          Schema elementSchema = fieldSchema.getElementType();
          dbFieldPositionStr = SchemaHelper.getMetaField(elementSchema, "dbFieldPosition");
        }
      }
      if (null != dbFieldPositionStr && !dbFieldPositionStr.isEmpty())
      {
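SchemaHelper.getMetaField looks up per-field metadata such as dbFieldPosition. Its storage format is not visible in these excerpts; a plausible sketch, assuming the metadata lives in a "meta" string property shaped like "key1=value1;key2=value2" (both the property name and the format are assumptions; the snippets also call a Schema-level overload, which would read schema.getProp("meta") the same way):

// Hypothetical sketch: parse a "meta" property of the form "k1=v1;k2=v2;...".
public static String getMetaField(Schema.Field field, String metaKey)
{
  String meta = field.getProp("meta");
  if (null == meta || meta.isEmpty())
  {
    return null;
  }
  for (String pair : meta.split(";"))
  {
    String[] kv = pair.split("=", 2);
    if (kv.length == 2 && kv[0].trim().equals(metaKey))
    {
      return kv[1].trim();
    }
  }
  return null;
}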

    if (message instanceof GenericRecord) {
      GenericRecord record = (GenericRecord) message;
      populateAvroHeaders(hdrs, record.getSchema(), message);
      flumeEvent = EventBuilder.withBody(serialize(record, record.getSchema()), hdrs);
    } else if (message instanceof SpecificRecord || avroReflectionEnabled) {
      Schema schema = ReflectData.get().getSchema(message.getClass());
      populateAvroHeaders(hdrs, schema, message);
      flumeEvent = EventBuilder.withBody(serialize(message, schema), hdrs);
    } else {
      hdrs.put(Log4jAvroHeaders.MESSAGE_ENCODING.toString(), "UTF8");
      String msg = layout != null ? layout.format(event) : message.toString();
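serialize is not shown in this excerpt; a minimal sketch of Avro binary encoding that fits the call sites above (the body is an assumption; ReflectDatumWriter handles reflect, specific, and generic records alike):

// Sketch: encode a datum to Avro binary with the given schema.
// Requires org.apache.avro.io.BinaryEncoder, org.apache.avro.io.EncoderFactory,
// org.apache.avro.reflect.ReflectDatumWriter, java.io.ByteArrayOutputStream.
private byte[] serialize(Object datum, Schema schema) throws IOException {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(schema);
  writer.write(datum, encoder);
  encoder.flush();
  return out.toByteArray();
}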

    DatasetDescriptor descriptor = view.getDataset().getDescriptor();
    String formatName = descriptor.getFormat().getName();
    Preconditions.checkArgument(allowedFormats().contains(formatName),
      "Unsupported format: " + formatName);

    Schema newSchema = descriptor.getSchema();
    if (targetSchema == null || !newSchema.equals(targetSchema)) {
      this.targetSchema = descriptor.getSchema();
      // target dataset schema has changed, invalidate all readers based on it
      readers.invalidateAll();
    }

      logStats();
    }
  }

  private Schema createAvroSchema() {
    Schema avroSchema = Schema.createRecord("Doc", "adoc", null, false);
    List<Field> fields = new ArrayList<Field>();
    fields.add(new Field("id", Schema.create(Type.STRING), null, null));
    fields.add(new Field("user_friends_count",
                         createOptional(Schema.create(Type.INT)),
                         null, null));
    fields.add(new Field("user_location",
                         createOptional(Schema.create(Type.STRING)),
                         null, null));
    fields.add(new Field("user_description",
                         createOptional(Schema.create(Type.STRING)),
                         null, null));
    fields.add(new Field("user_statuses_count",
                         createOptional(Schema.create(Type.INT)),
                         null, null));
    fields.add(new Field("user_followers_count",
                         createOptional(Schema.create(Type.INT)),
                         null, null));
    fields.add(new Field("user_name",
                         createOptional(Schema.create(Type.STRING)),
                         null, null));
    fields.add(new Field("user_screen_name",
                         createOptional(Schema.create(Type.STRING)),
                         null, null));
    fields.add(new Field("created_at",
                         createOptional(Schema.create(Type.STRING)),
                         null, null));
    fields.add(new Field("text",
                         createOptional(Schema.create(Type.STRING)),
                         null, null));
    fields.add(new Field("retweet_count",
                         createOptional(Schema.create(Type.LONG)),
                         null, null));
    fields.add(new Field("retweeted",
                         createOptional(Schema.create(Type.BOOLEAN)),
                         null, null));
    fields.add(new Field("in_reply_to_user_id",
                         createOptional(Schema.create(Type.LONG)),
                         null, null));
    fields.add(new Field("source",
                         createOptional(Schema.create(Type.STRING)),
                         null, null));
    fields.add(new Field("in_reply_to_status_id",
                         createOptional(Schema.create(Type.LONG)),
                         null, null));
    fields.add(new Field("media_url_https",
                         createOptional(Schema.create(Type.STRING)),
                         null, null));
    fields.add(new Field("expanded_url",
                         createOptional(Schema.create(Type.STRING)),
                         null, null));
    avroSchema.setFields(fields);
    return avroSchema;
  }
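createOptional is a local helper not shown above; the conventional Avro idiom it presumably wraps is a union of null and the given type (a sketch under that assumption):

// Likely shape of the helper: an optional field is a ["null", type] union.
// Requires java.util.Arrays.
private static Schema createOptional(Schema schema) {
  return Schema.createUnion(Arrays.asList(Schema.create(Type.NULL), schema));
}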

    try {
      dataset = dsFactory.getDatasetRepository().load(dsFactory.getNamespace(), repoName);
      log.debug("Found dataset for " + repoName);
    }
    catch (DatasetNotFoundException ex) {
      Schema schema = datasetDefinition.getSchema(pojoClass);
      log.debug("Creating dataset for " + repoName + " using schema " + schema);
      if (recordClass != null && recordClass.isAssignableFrom(GenericRecord.class)) {
        Schema genericSchema = Schema.createRecord(
            "Generic"+schema.getName(),
            "Generic representation of " + schema.getName(),
            schema.getNamespace(),
            false);
        List<Schema.Field> fields = new ArrayList<Schema.Field>();
        for (Schema.Field f : schema.getFields()) {
          fields.add(new Schema.Field(f.name(), f.schema(), f.doc(), f.defaultValue()));
        }
        genericSchema.setFields(fields);
        schema = genericSchema;
      }
      DatasetDescriptor descriptor;
      if (datasetDefinition.getPartitionStrategy() == null) {
        descriptor = new DatasetDescriptor.Builder()
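The excerpt cuts off inside the builder call; with the Kite SDK, the non-partitioned branch would typically be completed along these lines (a sketch, not the project's exact code):

        // Sketch: build a descriptor around the (possibly generic-ized) schema.
        descriptor = new DatasetDescriptor.Builder()
            .schema(schema)
            .build();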
