Use of com.linkedin.data.schema.RecordDataSchema in the rest.li project by LinkedIn.
From the class DynamicRecordMetadata, method buildSchema.
/**
 * Build the schema of a {@link DynamicRecordTemplate}.
 *
 * @param name the name of the record.
 * @param fieldDefs the fields of the record.
 * @return the newly constructed {@link RecordDataSchema} containing the given fields.
 * @throws IllegalArgumentException if the {@link com.linkedin.data.schema.RecordDataSchema.Field} of the fieldDefs
 * are already set.
 */
public static RecordDataSchema buildSchema(String name, Collection<? extends FieldDef<?>> fieldDefs) {
    // Accumulates name/field validation errors; RecordDataSchema appends to it instead of throwing.
    StringBuilder errorMessageBuilder = new StringBuilder();
    RecordDataSchema schema = new RecordDataSchema(new Name(name, errorMessageBuilder), RecordDataSchema.RecordType.RECORD);
    List<RecordDataSchema.Field> fields = new ArrayList<>(fieldDefs.size());
    for (FieldDef<?> fieldDef : fieldDefs) {
        RecordDataSchema.Field paramField = fieldDef.getField();
        // A Field may belong to exactly one record; a FieldDef whose Field is already bound
        // to some record cannot be reused here.
        if (paramField.getRecord() != null) {
            // Fixed: original message lacked the space before "failed", producing e.g. "record Foofailed: ...".
            throw new IllegalArgumentException("Attempt to assign field " + fieldDef.getName() + " to record "
                + schema.getName() + " failed: " + "Record of field is already set to " + paramField.getRecord().getName());
        }
        paramField.setRecord(schema);
        fields.add(paramField);
    }
    schema.setFields(fields, errorMessageBuilder);
    return schema;
}
Use of com.linkedin.data.schema.RecordDataSchema in the rest.li project by LinkedIn.
From the class DataSchemaAnnotationValidator, method buildSchemaValidators.
/**
 * Build a cache of {@link Validator}s declared for the specified schema.
 *
 * Traverses the schema tree and, for each schema (plus each record field and each
 * aliased union member), records the validators declared under its "validate"
 * property. Entries are keyed by object identity, so distinct-but-equal schema
 * nodes get separate entries.
 *
 * @param schema to cache {@link Validator}s for.
 * @return the cache if successful.
 */
private IdentityHashMap<Object, List<Validator>> buildSchemaValidators(DataSchema schema) {
// Identity-keyed: the same DataSchema/Field/Member instance must map to the same entry.
final IdentityHashMap<Object, List<Validator>> map = new IdentityHashMap<>();
DataSchemaTraverse traverse = new DataSchemaTraverse();
traverse.traverse(schema, new DataSchemaTraverse.Callback() {
@Override
public void callback(List<String> path, DataSchema schema) {
List<Validator> validatorList = map.get(schema);
// Only process each schema instance once; a non-null entry means it was already handled.
if (validatorList == null) {
Object validateObject = schema.getProperties().get(VALIDATE);
if (validateObject == null) {
// Shared immutable sentinel for "no validators declared".
validatorList = NO_VALIDATORS;
} else {
validatorList = buildValidatorList(validateObject, path, schema);
}
map.put(schema, validatorList);
// Record fields can carry their own "validate" properties, cached under the Field object.
if (schema.getType() == DataSchema.Type.RECORD) {
RecordDataSchema recordDataSchema = (RecordDataSchema) schema;
for (RecordDataSchema.Field field : recordDataSchema.getFields()) {
validateObject = field.getProperties().get(VALIDATE);
if (validateObject == null) {
validatorList = NO_VALIDATORS;
} else {
// Temporarily extend the path with the field name for error reporting, then restore it.
path.add(field.getName());
validatorList = buildValidatorList(validateObject, path, field);
path.remove(path.size() - 1);
}
map.put(field, validatorList);
}
} else if (schema.getType() == DataSchema.Type.UNION) {
UnionDataSchema unionDataSchema = (UnionDataSchema) schema;
// Only aliased unions can have custom properties (and thus validators).
if (unionDataSchema.areMembersAliased()) {
for (UnionDataSchema.Member member : unionDataSchema.getMembers()) {
validateObject = member.getProperties().get(VALIDATE);
if (validateObject == null) {
validatorList = NO_VALIDATORS;
} else {
// Same push/pop pattern as record fields, using the member alias as the path segment.
path.add(member.getAlias());
validatorList = buildValidatorList(validateObject, path, member);
path.remove(path.size() - 1);
}
map.put(member, validatorList);
}
}
}
}
}
});
return map;
}
Use of com.linkedin.data.schema.RecordDataSchema in the rest.li project by LinkedIn.
From the class CompatibilityChecker, method check.
/**
 * Recursively check an older and a newer schema for compatibility.
 *
 * Dispatches to the type-specific check (record, union, enum, ...) once both
 * schemas are resolved to comparable types. Maintains {@code _path} so that any
 * messages emitted by the type-specific checks can report where in the schema
 * tree the difference was found.
 */
private void check(DataSchema older, DataSchema newer) {
Checked toCheck = new Checked(older, newer);
// Guard against cycles and repeated work: each (older, newer) pair is checked at most once.
if (_checked.contains(toCheck)) {
return;
}
_checked.add(toCheck);
// Identical instances are trivially compatible.
if (older == newer) {
return;
}
// Number of segments pushed onto _path in this frame; all are popped before returning.
int pathCount = 1;
if (_options.getMode() == CompatibilityOptions.Mode.DATA || _options.getMode() == CompatibilityOptions.Mode.EXTENSION) {
// In DATA/EXTENSION mode, typerefs are transparent: compare the dereferenced schemas.
older = older.getDereferencedDataSchema();
// Unwrap each typeref layer of the newer schema, recording each layer in _path.
while (newer.getType() == DataSchema.Type.TYPEREF) {
TyperefDataSchema typerefDataSchema = ((TyperefDataSchema) newer);
_path.addLast(typerefDataSchema.getFullName());
_path.addLast(DataSchemaConstants.REF_KEY);
// Two segments pushed, but removal below pops in pairs via the extra count.
pathCount++;
newer = typerefDataSchema.getRef();
}
}
if (newer.getType() == DataSchema.Type.TYPEREF) {
_path.addLast(((TyperefDataSchema) newer).getFullName());
} else {
_path.addLast(newer.getUnionMemberKey());
}
// isSameType presumably records an incompatibility when it returns false — TODO confirm;
// here we only descend into the type-specific check when the types match.
switch(newer.getType()) {
case TYPEREF:
if (isSameType(older, newer))
checkTyperef((TyperefDataSchema) older, (TyperefDataSchema) newer);
break;
case RECORD:
if (isSameType(older, newer))
checkRecord((RecordDataSchema) older, (RecordDataSchema) newer);
break;
case ARRAY:
if (isSameType(older, newer))
checkArray((ArrayDataSchema) older, (ArrayDataSchema) newer);
break;
case MAP:
if (isSameType(older, newer))
checkMap((MapDataSchema) older, (MapDataSchema) newer);
break;
case ENUM:
if (isSameType(older, newer))
checkEnum((EnumDataSchema) older, (EnumDataSchema) newer);
break;
case FIXED:
if (isSameType(older, newer))
checkFixed((FixedDataSchema) older, (FixedDataSchema) newer);
break;
case UNION:
if (isSameType(older, newer))
checkUnion((UnionDataSchema) older, (UnionDataSchema) newer);
break;
default:
if (newer instanceof PrimitiveDataSchema)
checkPrimitive(older, newer);
else
// Unreachable unless a new DataSchema.Type is added without updating this switch.
throw new IllegalStateException("Unknown schema type " + newer.getType() + ", checking old schema " + older + ", new schema " + newer);
break;
}
// Restore _path to its state on entry. Note: pathCount counts one pop per frame plus one
// per typeref layer, while the typeref loop pushes two segments per layer — the final
// unconditional addLast above balances the arithmetic.
for (; pathCount > 0; pathCount--) {
_path.removeLast();
}
return;
}
Use of com.linkedin.data.schema.RecordDataSchema in the rest.li project by LinkedIn.
From the class DataSchemaRichContextTraverser, method doRecursiveTraversal.
/**
 * Recursively traverse the schema held by the given context, invoking the
 * {@code _schemaVisitor} in PRE_ORDER before descending and in POST_ORDER after.
 *
 * Child schemas are visited through a fresh context per edge (typeref ref, map
 * key/value, array items, record fields, union members). Cycles are broken with
 * {@code _seenAncestorsDataSchema}, which tracks only the schemas on the current
 * ancestor chain (added before descending, removed after).
 */
private void doRecursiveTraversal(TraverserContextImpl context) {
// Add full name to the context's TraversePath
DataSchema schema = context.getCurrentSchema();
ArrayDeque<String> path = context.getTraversePath();
// Not popped here — each recursion gets its own context, so the path presumably
// belongs to this context alone. TODO confirm getNextContext copies the path.
path.add(schema.getUnionMemberKey());
// visitors
_schemaVisitor.callbackOnContext(context, DataSchemaTraverse.Order.PRE_ORDER);
/**
 * By default {@link DataSchemaRichContextTraverser} will only decide whether or not keep traversing based on whether the new
 * data schema has been seen.
 *
 * But the {@link SchemaVisitor} has the chance to override this control by setting {@link TraverserContext#_shouldContinue}
 * If this variable set to be {@link Boolean#TRUE}, the {@link DataSchemaRichContextTraverser} will traverse to next level (if applicable)
 * If this variable set to be {@link Boolean#FALSE}, the {@link DataSchemaRichContextTraverser} will stop traversing to next level
 * If this variable not set, the {@link DataSchemaRichContextTraverser} will decide whether or not to continue traversing based on whether
 * this data schema has been seen.
 */
if (context.shouldContinue() == Boolean.TRUE || !(context.shouldContinue() == Boolean.FALSE || _seenAncestorsDataSchema.containsKey(schema))) {
// Mark this schema as being on the current ancestor chain (cycle guard).
_seenAncestorsDataSchema.put(schema, Boolean.TRUE);
// Pass new context in every recursion
TraverserContextImpl nextContext = null;
switch(schema.getType()) {
case TYPEREF:
TyperefDataSchema typerefDataSchema = (TyperefDataSchema) schema;
nextContext = context.getNextContext(DataSchemaConstants.REF_KEY, null, typerefDataSchema.getRef(), CurrentSchemaEntryMode.TYPEREF_REF);
doRecursiveTraversal(nextContext);
break;
case MAP:
// traverse key
MapDataSchema mapDataSchema = (MapDataSchema) schema;
nextContext = context.getNextContext(DataSchemaConstants.MAP_KEY_REF, DataSchemaConstants.MAP_KEY_REF, mapDataSchema.getKey(), CurrentSchemaEntryMode.MAP_KEY);
doRecursiveTraversal(nextContext);
// then traverse values
nextContext = context.getNextContext(PathSpec.WILDCARD, PathSpec.WILDCARD, mapDataSchema.getValues(), CurrentSchemaEntryMode.MAP_VALUE);
doRecursiveTraversal(nextContext);
break;
case ARRAY:
ArrayDataSchema arrayDataSchema = (ArrayDataSchema) schema;
nextContext = context.getNextContext(PathSpec.WILDCARD, PathSpec.WILDCARD, arrayDataSchema.getItems(), CurrentSchemaEntryMode.ARRAY_VALUE);
doRecursiveTraversal(nextContext);
break;
case RECORD:
RecordDataSchema recordDataSchema = (RecordDataSchema) schema;
for (RecordDataSchema.Field field : recordDataSchema.getFields()) {
nextContext = context.getNextContext(field.getName(), field.getName(), field.getType(), CurrentSchemaEntryMode.FIELD);
// Expose the field to the visitor so it can inspect field-level properties.
nextContext.setEnclosingField(field);
doRecursiveTraversal(nextContext);
}
break;
case UNION:
UnionDataSchema unionDataSchema = (UnionDataSchema) schema;
for (UnionDataSchema.Member member : unionDataSchema.getMembers()) {
nextContext = context.getNextContext(member.getUnionMemberKey(), member.getUnionMemberKey(), member.getType(), CurrentSchemaEntryMode.UNION_MEMBER);
nextContext.setEnclosingUnionMember(member);
doRecursiveTraversal(nextContext);
}
break;
default:
// will stop recursively traversing if the current schema is a leaf node.
assert isLeafSchema(schema);
break;
}
// Leaving this subtree: the schema is no longer an ancestor, so it may be
// traversed again if reached via a different (non-cyclic) path.
_seenAncestorsDataSchema.remove(schema);
}
_schemaVisitor.callbackOnContext(context, DataSchemaTraverse.Order.POST_ORDER);
}
Use of com.linkedin.data.schema.RecordDataSchema in the rest.li project by LinkedIn.
From the class TestProjectionMaskApplier, method testBuildSchemaByProjectionNonexistentFields.
@Test
public void testBuildSchemaByProjectionNonexistentFields() {
    // Project a mix of one real field and one that does not exist in the schema.
    RecordDataSchema schema = (RecordDataSchema) DataTemplateUtil.getSchema(RecordTemplateWithPrimitiveKey.class);
    DataMap projectionMask = buildProjectionMaskDataMap("id", "nonexistentFieldFooBar");
    boolean exceptionSeen = false;
    try {
        buildSchemaByProjection(schema, projectionMask);
    } catch (InvalidProjectionException e) {
        exceptionSeen = true;
        // The exception must name the offending field and the schema it is missing from.
        Assert.assertEquals(e.getMessage(), "Projected field \"nonexistentFieldFooBar\" not present in schema \"" + schema.getFullName() + "\"");
    }
    Assert.assertTrue(exceptionSeen, "Building schema by projection with nonexistent fields should throw an InvalidProjectionException");
}
Aggregations