Use of com.linkedin.avroutil1.model.AvroSchemaField in project avro-util by linkedin.
In class AvscParser, method parseNamedSchema:
private AvroNamedSchema parseNamedSchema(JsonObjectExt objectNode, AvscFileParseContext context, AvroType avroType, CodeLocation codeLocation, JsonPropertiesContainer extraProps) {
  AvroName schemaName = parseSchemaName(objectNode, context, avroType);
  List<AvroName> aliases = parseAliases(objectNode, context, avroType, schemaName);
  // technically the avro spec does not allow "doc" on type fixed, but screw that
  Located<String> docStr = getOptionalString(objectNode, "doc");
  String doc = docStr != null ? docStr.getValue() : null;
  boolean namespaceChanged = false;
  // check if context namespace changed
  if (!context.getCurrentNamespace().equals(schemaName.getNamespace())) {
    context.pushNamespace(schemaName.getNamespace());
    namespaceChanged = true;
  }
  AvroNamedSchema namedSchema;
  switch (avroType) {
    case RECORD:
      AvroRecordSchema recordSchema = new AvroRecordSchema(codeLocation, schemaName, aliases, doc, extraProps);
      JsonArrayExt fieldsNode = getRequiredArray(objectNode, "fields", () -> "all avro records must have fields");
      List<AvroSchemaField> fields = new ArrayList<>(fieldsNode.size());
      for (int fieldNum = 0; fieldNum < fieldsNode.size(); fieldNum++) {
        JsonValueExt fieldDeclNode = (JsonValueExt) fieldsNode.get(fieldNum); // never null
        JsonValue.ValueType fieldNodeType = fieldDeclNode.getValueType();
        if (fieldNodeType != JsonValue.ValueType.OBJECT) {
          throw new AvroSyntaxException("field " + fieldNum + " for record " + schemaName.getSimpleName() + " at "
              + fieldDeclNode.getStartLocation() + " expected to be an OBJECT, not a "
              + JsonPUtil.describe(fieldNodeType) + " (" + fieldDeclNode + ")");
        }
        TextLocation fieldStartLocation = Util.convertLocation(fieldDeclNode.getStartLocation());
        TextLocation fieldEndLocation = Util.convertLocation(fieldDeclNode.getEndLocation());
        CodeLocation fieldCodeLocation = new CodeLocation(context.getUri(), fieldStartLocation, fieldEndLocation);
        JsonObjectExt fieldDecl = (JsonObjectExt) fieldDeclNode;
        Located<String> fieldName = getRequiredString(fieldDecl, "name", () -> "all record fields must have a name");
        JsonValueExt fieldTypeNode = getRequiredNode(fieldDecl, "type", () -> "all record fields must have a type");
        Located<String> locatedDocField = getOptionalString(fieldDecl, "doc");
        String docField = locatedDocField == null ? null : locatedDocField.getValue();
        SchemaOrRef fieldSchema = parseSchemaDeclOrRef(fieldTypeNode, context, false);
        JsonValueExt fieldDefaultValueNode = fieldDecl.get("default");
        AvroLiteral defaultValue = null;
        if (fieldDefaultValueNode != null) {
          if (fieldSchema.isResolved()) {
            LiteralOrIssue defaultValueOrIssue = parseLiteral(fieldDefaultValueNode, fieldSchema.getSchema(), fieldName.getValue(), context);
            if (defaultValueOrIssue.getIssue() == null) {
              defaultValue = defaultValueOrIssue.getLiteral();
            }
            // TODO - handle issues
          } else {
            // TODO - implement delayed default value parsing
            throw new UnsupportedOperationException("delayed parsing of default value for " + fieldName.getValue() + " TBD");
          }
        }
        LinkedHashMap<String, JsonValueExt> props = parseExtraProps(fieldDecl, CORE_FIELD_PROPERTIES);
        JsonPropertiesContainer propsContainer = props.isEmpty() ? JsonPropertiesContainer.EMPTY : new JsonPropertiesContainerImpl(props);
        AvroSchemaField field = new AvroSchemaField(fieldCodeLocation, fieldName.getValue(), docField, fieldSchema, defaultValue, propsContainer);
        fields.add(field);
      }
      recordSchema.setFields(fields);
      namedSchema = recordSchema;
      break;
    case ENUM:
      JsonArrayExt symbolsNode = getRequiredArray(objectNode, "symbols", () -> "all avro enums must have symbols");
      List<String> symbols = new ArrayList<>(symbolsNode.size());
      for (int ordinal = 0; ordinal < symbolsNode.size(); ordinal++) {
        JsonValueExt symbolNode = (JsonValueExt) symbolsNode.get(ordinal);
        JsonValue.ValueType symbolNodeType = symbolNode.getValueType();
        if (symbolNodeType != JsonValue.ValueType.STRING) {
          throw new AvroSyntaxException("symbol " + ordinal + " for enum " + schemaName.getSimpleName() + " at "
              + symbolNode.getStartLocation() + " expected to be a STRING, not a "
              + JsonPUtil.describe(symbolNodeType) + " (" + symbolNode + ")");
        }
        symbols.add(symbolNode.toString());
      }
      String defaultSymbol = null;
      Located<String> defaultStr = getOptionalString(objectNode, "default");
      if (defaultStr != null) {
        defaultSymbol = defaultStr.getValue();
        if (!symbols.contains(defaultSymbol)) {
          context.addIssue(AvscIssues.badEnumDefaultValue(locationOf(context.getUri(), defaultStr), defaultSymbol, schemaName.getSimpleName(), symbols));
          // TODO - support "fixing" by selecting 1st symbol as default?
          defaultSymbol = null;
        }
      }
      namedSchema = new AvroEnumSchema(codeLocation, schemaName, aliases, doc, symbols, defaultSymbol, extraProps);
      break;
    case FIXED:
      JsonValueExt sizeNode = getRequiredNode(objectNode, "size", () -> "fixed types must have a size property");
      if (sizeNode.getValueType() != JsonValue.ValueType.NUMBER || !(((JsonNumberExt) sizeNode).isIntegral())) {
        throw new AvroSyntaxException("size for fixed " + schemaName.getSimpleName() + " at "
            + sizeNode.getStartLocation() + " expected to be an INTEGER, not a "
            + JsonPUtil.describe(sizeNode.getValueType()) + " (" + sizeNode + ")");
      }
      int fixedSize = ((JsonNumberExt) sizeNode).intValue();
      Parsed<AvroLogicalType> logicalTypeResult = parseLogicalType(objectNode, context, avroType, codeLocation);
      if (logicalTypeResult.hasIssues()) {
        context.addIssues(logicalTypeResult.getIssues());
      }
      namedSchema = new AvroFixedSchema(codeLocation, schemaName, aliases, doc, fixedSize, logicalTypeResult.getData(), extraProps);
      break;
    default:
      throw new IllegalStateException("unhandled: " + avroType + " for object at " + codeLocation.getStart());
  }
  if (namespaceChanged) {
    context.popNamespace();
  }
  return namedSchema;
}
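
For orientation, here is a minimal usage sketch showing how the AvroSchemaField objects built by this method reach callers. It relies only on calls that appear in the tests below (AvscParser.parse, AvscParseResult.getTopLevelSchema, AvroRecordSchema.getFields, AvroSchemaField.getName/getSchema); the inline schema literal is an illustrative assumption, not a file from the repository.

public void printFields() {
  // hypothetical inline schema, just enough to exercise the RECORD branch above
  String avsc = "{\"type\": \"record\", \"name\": \"Demo\", \"namespace\": \"com.example\", \"fields\": ["
      + "{\"name\": \"count\", \"type\": \"int\", \"default\": 0},"
      + "{\"name\": \"label\", \"type\": \"string\", \"doc\": \"free-form text\"}]}";
  AvscParseResult result = new AvscParser().parse(avsc);
  AvroRecordSchema schema = (AvroRecordSchema) result.getTopLevelSchema();
  for (AvroSchemaField field : schema.getFields()) {
    // each field carries the name, doc, schema (or unresolved ref), default literal and extra props parsed above
    System.out.println(field.getName() + " -> " + field.getSchema().type());
  }
}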
Use of com.linkedin.avroutil1.model.AvroSchemaField in project avro-util by linkedin.
In class AvscParserTest, method testParsingProperties:
@Test
public void testParsingProperties() throws Exception {
  String avsc = TestUtil.load("schemas/TestRecordWithProperties.avsc");
  AvscParser parser = new AvscParser();
  AvscParseResult result = parser.parse(avsc);
  Assert.assertNull(result.getParseError());
  AvroRecordSchema schema = (AvroRecordSchema) result.getTopLevelSchema();
  Assert.assertNotNull(schema);
  // order is important
  Assert.assertEquals(schema.propertyNames(),
      Arrays.asList("extraNullProp", "extraBooleanProp", "extraIntProp", "extraFloatProp", "extraStringProp", "extraArrayProp", "extraObjectProp"));
  Assert.assertNull(schema.getPropertyAsJsonLiteral("noSuchProp"));
  Assert.assertNull(schema.getPropertyAsObject("noSuchProp"));
  Assert.assertEquals(schema.getPropertyAsJsonLiteral("extraNullProp"), "null");
  Assert.assertEquals(schema.getPropertyAsObject("extraNullProp"), JsonPropertiesContainer.NULL_VALUE);
  Assert.assertEquals(schema.getPropertyAsJsonLiteral("extraBooleanProp"), "true");
  Assert.assertEquals(schema.getPropertyAsObject("extraBooleanProp"), Boolean.TRUE);
  Assert.assertEquals(schema.getPropertyAsJsonLiteral("extraIntProp"), "42");
  Assert.assertEquals(schema.getPropertyAsObject("extraIntProp"), new BigDecimal(42));
  Assert.assertEquals(schema.getPropertyAsJsonLiteral("extraFloatProp"), "4.2");
  // new BigDecimal(4.2d) is actually 4.20000000000000017763568394002504646778106689453125
  // we love floating point precision
  Assert.assertEquals(schema.getPropertyAsObject("extraFloatProp"), new BigDecimal("4.2"));
  Assert.assertEquals(schema.getPropertyAsJsonLiteral("extraStringProp"), "\"a string\"");
  Assert.assertEquals(schema.getPropertyAsObject("extraStringProp"), "a string");
  JSONAssert.assertEquals("[null, 0, false, \"wow\", {\"this\" : \"makes\", \"little\" : \"sense\"}]",
      schema.getPropertyAsJsonLiteral("extraArrayProp"), JSONCompareMode.STRICT);
  Assert.assertEquals(schema.getPropertyAsObject("extraArrayProp"),
      Arrays.asList(JsonPropertiesContainer.NULL_VALUE, new BigDecimal(0), Boolean.FALSE, "wow",
          new LinkedHashMap<String, Object>() {
            {
              put("this", "makes");
              put("little", "sense");
            }
          }));
  JSONAssert.assertEquals("{\"thats\": [\"all\", \"folks\"]}",
      schema.getPropertyAsJsonLiteral("extraObjectProp"), JSONCompareMode.STRICT);
  Assert.assertEquals(schema.getPropertyAsObject("extraObjectProp"), new LinkedHashMap<String, Object>() {
    {
      put("thats", Arrays.asList("all", "folks"));
    }
  });
  AvroSchemaField stringField = schema.getField("stringField");
  Assert.assertEquals(stringField.propertyNames(), Collections.singletonList("fieldStringProp"));
  Assert.assertEquals(stringField.getPropertyAsJsonLiteral("fieldStringProp"), "\"fieldStringValue\"");
  Assert.assertEquals(stringField.getPropertyAsObject("fieldStringProp"), "fieldStringValue");
  AvroSchema stringSchema = stringField.getSchema();
  Assert.assertEquals(stringSchema.propertyNames(), Arrays.asList("avro.java.string", "typeStringProp"));
  AvroSchema uuidSchema = schema.getField("uuidField").getSchema();
  Assert.assertEquals(uuidSchema.propertyNames(), Collections.singletonList("logicalType"));
}
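
The two accessor styles exercised above differ in representation: getPropertyAsJsonLiteral returns the raw JSON text of the property value, while getPropertyAsObject decodes it into Java values (BigDecimal for numbers, Boolean, String, List and LinkedHashMap for containers, JsonPropertiesContainer.NULL_VALUE for JSON null). A minimal sketch, assuming an inline schema rather than the repository's TestRecordWithProperties.avsc:

// "precision" is not a core record property here, so the parser keeps it as an extra prop
String avsc = "{\"type\": \"record\", \"name\": \"PropsDemo\", \"precision\": 4.2,"
    + " \"fields\": [{\"name\": \"f\", \"type\": \"string\"}]}";
AvroRecordSchema schema = (AvroRecordSchema) new AvscParser().parse(avsc).getTopLevelSchema();
String literal = schema.getPropertyAsJsonLiteral("precision"); // "4.2" - the JSON text as written
Object decoded = schema.getPropertyAsObject("precision");      // new BigDecimal("4.2") - the decoded value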
Use of com.linkedin.avroutil1.model.AvroSchemaField in project avro-util by linkedin.
In class AvscParserTest, method testSelfReference:
@Test
public void testSelfReference() throws Exception {
  String avsc = TestUtil.load("schemas/LongList.avsc");
  AvscParser parser = new AvscParser();
  AvscParseResult result = parser.parse(avsc);
  Assert.assertNull(result.getParseError());
  AvroRecordSchema schema = (AvroRecordSchema) result.getTopLevelSchema();
  // schema.next[1] == schema
  AvroSchemaField nextField = schema.getField("next");
  AvroUnionSchema union = (AvroUnionSchema) nextField.getSchema();
  SchemaOrRef secondBranch = union.getTypes().get(1);
  Assert.assertSame(secondBranch.getSchema(), schema);
}
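
LongList.avsc itself is not shown on this page; a plausible shape, assuming it mirrors the canonical self-referencing record from the Avro specification (the "value" field and the branch order are assumptions), would be:

String avsc = "{\n"
    + "  \"type\": \"record\",\n"
    + "  \"name\": \"LongList\",\n"
    + "  \"fields\": [\n"
    + "    {\"name\": \"value\", \"type\": \"long\"},\n"
    + "    {\"name\": \"next\", \"type\": [\"null\", \"LongList\"]}\n"
    + "  ]\n"
    + "}";
// the parser resolves the "LongList" name inside the union back to the enclosing record,
// which is why union.getTypes().get(1).getSchema() above is the very same object as schema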
Use of com.linkedin.avroutil1.model.AvroSchemaField in project avro-util by linkedin.
In class AvscParserTest, method testParsingStringTypes:
@Test
public void testParsingStringTypes() throws Exception {
  String avsc = TestUtil.load("schemas/TestRecordWithStringTypes.avsc");
  AvscParser parser = new AvscParser();
  AvscParseResult result = parser.parse(avsc);
  Assert.assertNull(result.getParseError());
  AvroRecordSchema schema = (AvroRecordSchema) result.getTopLevelSchema();
  Assert.assertNotNull(schema);
  for (AvroSchemaField field : schema.getFields()) {
    Assert.assertEquals(field.getSchema().type(), AvroType.STRING);
    AvroPrimitiveSchema strSchema = (AvroPrimitiveSchema) field.getSchema();
    switch (field.getName()) {
      case "vanillaStringField":
        Assert.assertNull(strSchema.getJavaStringRepresentation());
        break;
      case "stringFieldWithStringJavaType":
        Assert.assertEquals(strSchema.getJavaStringRepresentation(), AvroJavaStringRepresentation.STRING);
        break;
      case "stringFieldWithMisplacedCharSequenceJavaType":
        Assert.assertNull(strSchema.getJavaStringRepresentation());
        // TODO - look for a warning about the misplaced value
        break;
      case "stringFieldWithCharSequenceJavaType":
        Assert.assertEquals(strSchema.getJavaStringRepresentation(), AvroJavaStringRepresentation.CHAR_SEQUENCE);
        break;
      case "stringFieldWithUtf8JavaType":
        Assert.assertEquals(strSchema.getJavaStringRepresentation(), AvroJavaStringRepresentation.UTF8);
        break;
    }
  }
}
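
The representation hints checked above come from the avro.java.string property on the string type declaration. A minimal sketch of one such declaration, assuming the conventional property values ("String", "CharSequence", "Utf8"); the actual TestRecordWithStringTypes.avsc may differ:

String avsc = "{\"type\": \"record\", \"name\": \"StrDemo\", \"fields\": ["
    + "{\"name\": \"s\", \"type\": {\"type\": \"string\", \"avro.java.string\": \"Utf8\"}}]}";
AvroRecordSchema schema = (AvroRecordSchema) new AvscParser().parse(avsc).getTopLevelSchema();
AvroPrimitiveSchema strSchema = (AvroPrimitiveSchema) schema.getField("s").getSchema();
// expected to map to AvroJavaStringRepresentation.UTF8; a hint placed on the field instead of
// its type would be ignored, which is what the "misplaced" case in the test above checks
System.out.println(strSchema.getJavaStringRepresentation());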
Use of com.linkedin.avroutil1.model.AvroSchemaField in project avro-util by linkedin.
In class AvscParserTest, method testParsingHorribleLogicalTypes:
@Test
public void testParsingHorribleLogicalTypes() throws Exception {
  String avsc = TestUtil.load("schemas/TestRecordWithHorribleLogicalTypes.avsc");
  AvscParser parser = new AvscParser();
  AvscParseResult result = parser.parse(avsc);
  Assert.assertNull(result.getParseError());
  AvroRecordSchema schema = (AvroRecordSchema) result.getTopLevelSchema();
  for (AvroSchemaField field : schema.getFields()) {
    Assert.assertNull(field.getSchema().logicalType(),
        "field " + field.getName() + " should not have a successfully-parsed logicalType");
  }
}
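
"Horrible" logical types here are declarations that do not match their carrier type; the test shows the parse still succeeds (no parse error) and logicalType() stays null, with the mismatch presumably surfaced as a parse issue rather than an exception. A minimal sketch of one such mismatch, assuming the standard Avro rule that date only annotates int (an illustration, not the repository file's contents):

String avsc = "{\"type\": \"record\", \"name\": \"BadLogical\", \"fields\": ["
    + "{\"name\": \"when\", \"type\": {\"type\": \"string\", \"logicalType\": \"date\"}}]}";
AvscParseResult result = new AvscParser().parse(avsc);
AvroRecordSchema schema = (AvroRecordSchema) result.getTopLevelSchema();
// the bad logicalType does not fail parsing; the field's schema simply reports no logical type
System.out.println(schema.getField("when").getSchema().logicalType()); // expected: null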