Use of com.linkedin.data.schema.TyperefDataSchema in project rest.li by linkedin.
The class PathSpecBasedSchemaAnnotationVisitor, method callbackOnContext:
@Override
public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order order)
{
  if (order == DataSchemaTraverse.Order.POST_ORDER)
  {
    // Use the post-order visit to validate override paths.
    VisitorContext postVisitContext = context.getVisitorContext();
    List<AnnotationEntry> annotationEntries =
        ((PathSpecTraverseVisitorContext) postVisitContext).getAnnotationEntriesFromParentSchema();
    // Do annotationEntry validity checking.
    for (AnnotationEntry annotationEntry : annotationEntries)
    {
      if (annotationEntry.isOverride()
          && (annotationEntry.getOverridePathValidStatus() == AnnotationEntry.OverridePathValidStatus.UNCHECKED))
      {
        markAnnotationEntryInvalid(annotationEntry, OverridePathErrorMsg.DOES_NOT_MATCH_NAME);
      }
    }
    if (context.getParentSchema() == null)
    {
      getSchemaVisitorTraversalResult().setConstructedSchema(_schemaConstructed);
    }
    return;
  }

  VisitorContext visitorContext = context.getVisitorContext();
  // Prepare visitorContext for the next level of recursion.
  PathSpecTraverseVisitorContext newVisitorContext = new PathSpecTraverseVisitorContext();
  // {@link PathSpecBasedSchemaAnnotationVisitor} builds a new skeleton schema on the fly after seeing the original schema.
  // If a skeleton schema has already been built for a data schema, the cached one is reused.
  // Also see {@link PathSpecTraverseVisitorContext}.
  DataSchema newSchema = null;
  DataSchema parentSchema = context.getParentSchema();
  DataSchema currentSchema = context.getCurrentSchema();
  List<AnnotationEntry> currentAnnotationEntries =
      ((PathSpecTraverseVisitorContext) visitorContext).getAnnotationEntriesFromParentSchema();

  // Match & filter current overrides.
  if (parentSchema != null && !(parentSchema.getType() == DataSchema.Type.TYPEREF))
  {
    // Skip if the parent is a typeref, because schemaPathSpec does not contain a typeref component.
    String pathSpecMatchingSegment = context.getSchemaPathSpec().peekLast();
    currentAnnotationEntries = currentAnnotationEntries.stream()
        .filter(annotationEntry ->
            (annotationEntry.getOverridePathValidStatus() == AnnotationEntry.OverridePathValidStatus.UNCHECKED)
                && annotationEntry.getRemainingPaths().size() > 0
                && Objects.equals(annotationEntry.getRemainingPaths().peekFirst(), pathSpecMatchingSegment))
        .peek(annotationEntry ->
        {
          annotationEntry.getMatchedPaths().add(pathSpecMatchingSegment);
          annotationEntry.getRemainingPaths().pollFirst();
        })
        .collect(toList());
  }
  assert (currentAnnotationEntries.stream()
      .filter(AnnotationEntry::isOverride)
      .allMatch(annotationEntry ->
          annotationEntry.getOverridePathValidStatus() == AnnotationEntry.OverridePathValidStatus.UNCHECKED));

  // Add {@link AnnotationEntry}s from the enclosing schema or field.
  if (parentSchema != null)
  {
    switch (parentSchema.getType())
    {
      case RECORD:
        RecordDataSchema.Field enclosingField = context.getEnclosingField();
        ArrayDeque<String> fullTraversePath = new ArrayDeque<>(context.getTraversePath());
        // Need to exclude the currentSchema's path segment so that this becomes the field's path.
        fullTraversePath.pollLast();
        currentAnnotationEntries.addAll(generateAnnotationEntryFromField(enclosingField, fullTraversePath));
        break;
      case TYPEREF:
        currentAnnotationEntries.addAll(
            generateAnnotationEntryFromTypeRefSchema((TyperefDataSchema) parentSchema, context.getTraversePath()));
        break;
      default:
        break;
    }
  }
  // Add {@link AnnotationEntry}s from the named schema.
  currentAnnotationEntries.addAll(generateAnnotationEntryFromNamedSchema(currentSchema, context.getTraversePath()));

  // Note: cyclic annotation in a typeref is also handled through its de-referenced record schema.
  if (currentSchema.getType() == DataSchema.Type.RECORD)
  {
    String currentSchemaFullName = ((RecordDataSchema) currentSchema).getFullName();
    for (AnnotationEntry annotationEntry : currentAnnotationEntries)
    {
      String overrideStartSchemaName = annotationEntry.getStartSchemaName();
      if (detectCycle(overrideStartSchemaName, currentSchemaFullName))
      {
        // If a cycle is found, report errors.
        getSchemaVisitorTraversalResult().addMessage(context.getTraversePath(),
            "Found overrides that forms a cyclic-referencing: Overrides entry in "
                + "traverser path \"%s\" with its pathSpec value \"%s\" is pointing to the field "
                + "with traverser path \"%s\" and schema name \"%s\", this is causing cyclic-referencing.",
            new PathSpec(annotationEntry.getPathToAnnotatedTarget().toArray(new String[0])).toString(),
            annotationEntry.getOverridePathSpecStr(),
            new PathSpec(context.getTraversePath().toArray(new String[0])).toString(),
            currentSchemaFullName);
        context.setShouldContinue(Boolean.FALSE);
        newVisitorContext.setAnnotationEntriesFromParentSchema(currentAnnotationEntries);
        context.setVisitorContext(newVisitorContext);
        return;
      }
      else
      {
        // If no cycle is found, add this edge to the edges seen so far.
        _directedEdges.computeIfAbsent(overrideStartSchemaName, key -> new HashSet<>()).add(currentSchemaFullName);
      }
    }
  }

  // Process the current schema.
  try
  {
    if (DataSchemaRichContextTraverser.isLeafSchema(currentSchema))
    {
      newSchema = createOrReUseSchemaAndAttachToParent(context, (currentAnnotationEntries.size() != 0));
      newSchema.getResolvedProperties().putAll(
          resolveAnnotationEntries(currentAnnotationEntries, context.getSchemaPathSpec()));
      // Do annotationEntry validity checking.
      for (AnnotationEntry annotationEntry : currentAnnotationEntries)
      {
        if (annotationEntry.isOverride())
        {
          if (annotationEntry.getRemainingPaths().size() == 0)
          {
            annotationEntry.setOverridePathValidStatus(AnnotationEntry.OverridePathValidStatus.VALID);
          }
          else
          {
            markAnnotationEntryInvalid(annotationEntry, OverridePathErrorMsg.TOO_LONG);
          }
        }
      }
    }
    else if (currentSchema.isComplex())
    {
      // Either all entries are non-overrides from a TyperefDataSchema, or all are overrides to another complex data schema.
      assert (currentAnnotationEntries.stream().noneMatch(AnnotationEntry::isOverride)
          || currentAnnotationEntries.stream().allMatch(AnnotationEntry::isOverride));
      // Do annotationEntry validity checking.
      if ((currentSchema.getType() != DataSchema.Type.TYPEREF))
      {
        for (AnnotationEntry annotationEntry : currentAnnotationEntries)
        {
          if (annotationEntry.isOverride() && (annotationEntry.getRemainingPaths().size() == 0))
          {
            markAnnotationEntryInvalid(annotationEntry, OverridePathErrorMsg.TOO_SHORT);
          }
        }
      }
      if (currentAnnotationEntries.stream()
          .anyMatch(annotationEntry -> // non-overrides from typeref
              !annotationEntry.isOverride()
                  || (annotationEntry.getOverridePathValidStatus() == AnnotationEntry.OverridePathValidStatus.UNCHECKED)))
      {
        // There are unresolved annotation entries that resolve to this complex data schema and its descendants,
        // so tell the traverser to continue traversing.
        newSchema = createOrReUseSchemaAndAttachToParent(context, true);
        context.setShouldContinue(Boolean.TRUE);
      }
      else
      {
        // Order matters: need to check "seen" before creating a new schema or reusing one.
        context.setShouldContinue(!_seenDataSchemaMapping.containsKey(currentSchema));
        newSchema = createOrReUseSchemaAndAttachToParent(context, false);
      }
    }
  }
  catch (CloneNotSupportedException e)
  {
    throw new IllegalStateException(
        String.format("encounter unexpected CloneNotSupportedException at traverse path location %s",
            Arrays.toString(context.getTraversePath().toArray())), e);
  }

  // Process record schemas with "included" fields before setting overrides for the next visitorContext.
  currentAnnotationEntries.addAll(generateAnnotationEntryFromInclude(currentSchema, context.getTraversePath()));
  newVisitorContext.setAnnotationEntriesFromParentSchema(currentAnnotationEntries);
  newVisitorContext.setOutputParentSchema(newSchema);
  context.setVisitorContext(newVisitorContext);
}
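
The override matching above skips a typeref parent because a typeref layer never appears in schemaPathSpec. The following is a minimal sketch of that de-referencing behavior, separate from the visitor itself; the Event/UnixTimestamp schema text is hypothetical, and it assumes the DataTemplateUtil.parseSchema entry point for turning schema JSON into a DataSchema:

import com.linkedin.data.schema.DataSchema;
import com.linkedin.data.schema.PathSpec;
import com.linkedin.data.schema.RecordDataSchema;
import com.linkedin.data.template.DataTemplateUtil;

public class TyperefPathSpecSketch
{
  public static void main(String[] args)
  {
    // Hypothetical schema: a record field whose type is a typeref of "long".
    String schemaText =
        "{ \"type\" : \"record\", \"name\" : \"Event\", \"fields\" : ["
            + "  { \"name\" : \"createdAt\", \"type\" : "
            + "    { \"type\" : \"typeref\", \"name\" : \"UnixTimestamp\", \"ref\" : \"long\" } }"
            + "] }";
    RecordDataSchema record = (RecordDataSchema) DataTemplateUtil.parseSchema(schemaText);
    DataSchema fieldType = record.getField("createdAt").getType();

    // The declared field type is the typeref, but de-referencing yields the underlying primitive.
    System.out.println(fieldType.getType());              // TYPEREF
    System.out.println(fieldType.getDereferencedType());  // LONG

    // The field's PathSpec carries no typeref segment, which is why the visitor above
    // skips the parent schema when that parent is a typeref.
    System.out.println(new PathSpec("createdAt"));        // /createdAt
  }
}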
Use of com.linkedin.data.schema.TyperefDataSchema in project rest.li by linkedin.
The class AnnotationCheckResolvedPropertiesVisitor, method callbackOnContext:
@Override
public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order order)
{
  // Only execute this callback when the order is PRE_ORDER.
  if (order == DataSchemaTraverse.Order.POST_ORDER)
  {
    return;
  }

  DataSchema currentSchema = context.getCurrentSchema();
  RecordDataSchema.Field schemaField = context.getEnclosingField();
  UnionDataSchema.Member unionMember = context.getEnclosingUnionMember();
  ArrayDeque<String> pathToSchema = context.getSchemaPathSpec().clone();
  pathToSchema.addFirst(((NamedDataSchema) context.getTopLevelSchema()).getName());

  // Append an indicator so that this node and its child node do not end up with the same pathSpec.
  if (currentSchema instanceof TyperefDataSchema)
  {
    context.getSchemaPathSpec().addLast(TYPEREF_INDICATOR);
  }

  if (schemaField != null && pathToSchema.getLast().equals(context.getEnclosingField().getName()))
  {
    // The current node is a field of a record schema; get the field's annotations.
    // Add FIELD_INDICATOR to the pathSpec to differentiate the field annotation from the field type's schema annotation.
    pathToSchema.addLast(FIELD_INDICATOR);
    PathSpec pathSpec = new PathSpec(pathToSchema);
    _nodeToResolvedPropertiesMap.put(pathSpec,
        new ImmutablePair<>(generateCompatibilityCheckContext(schemaField, unionMember, currentSchema, pathSpec),
            chooseProperties(schemaField.getResolvedProperties(), schemaField.getProperties())));
    pathToSchema.removeLast();
  }
  else if (unionMember != null && pathToSchema.getLast().equals(context.getEnclosingUnionMember().getUnionMemberKey()))
  {
    // The current node is a union member; get the union member key's annotations.
    // Add UNION_MEMBER_KEY_INDICATOR to the pathSpec to differentiate the union member key annotation
    // from the union type's schema annotation.
    pathToSchema.addLast(UNION_MEMBER_KEY_INDICATOR);
    PathSpec pathSpec = new PathSpec(pathToSchema);
    _nodeToResolvedPropertiesMap.put(pathSpec,
        new ImmutablePair<>(generateCompatibilityCheckContext(schemaField, unionMember, currentSchema, pathSpec),
            unionMember.getProperties()));
    pathToSchema.removeLast();
  }

  // If there are no resolvedProperties but there are properties, use the properties for the annotation check.
  Map<String, Object> properties =
      chooseProperties(currentSchema.getResolvedProperties(), currentSchema.getProperties());
  PathSpec pathSpec = new PathSpec(pathToSchema);
  _nodeToResolvedPropertiesMap.put(pathSpec,
      new ImmutablePair<>(generateCompatibilityCheckContext(schemaField, unionMember, currentSchema, pathSpec),
          properties));
}
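
The TYPEREF_INDICATOR appended above keeps a typeref node and its de-referenced child from sharing the same PathSpec key. The following is a small sketch of that idea, with a made-up "$typeref" marker value standing in for the visitor's real constant:

import java.util.ArrayDeque;
import com.linkedin.data.schema.PathSpec;

public class TyperefIndicatorSketch
{
  // Hypothetical marker value; the visitor defines its own TYPEREF_INDICATOR constant.
  private static final String TYPEREF_INDICATOR = "$typeref";

  public static void main(String[] args)
  {
    ArrayDeque<String> pathToSchema = new ArrayDeque<>();
    pathToSchema.add("Event");
    pathToSchema.add("createdAt");

    // The typeref node itself keeps the plain path.
    PathSpec typerefNode = new PathSpec(pathToSchema.toArray(new String[0]));

    // Its de-referenced child is keyed with the marker appended, so the two map keys stay distinct.
    pathToSchema.addLast(TYPEREF_INDICATOR);
    PathSpec childOfTyperef = new PathSpec(pathToSchema.toArray(new String[0]));

    System.out.println(typerefNode);      // /Event/createdAt
    System.out.println(childOfTyperef);   // /Event/createdAt/$typeref
  }
}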
Use of com.linkedin.data.schema.TyperefDataSchema in project rest.li by linkedin.
The class SchemaSampleDataGenerator, method buildRecordData:
private static DataMap buildRecordData(ParentSchemas parentSchemas, NamedDataSchema schema, DataGenerationOptions spec)
{
  spec = preventRecursionIntoAlreadyTraversedSchemas(parentSchemas, spec, schema);
  parentSchemas.incrementReferences(schema);
  final DataMap data = new DataMap();
  if (schema instanceof RecordDataSchema)
  {
    for (RecordDataSchema.Field field : ((RecordDataSchema) schema).getFields())
    {
      if (!(spec.isRequiredFieldsOnly() && field.getOptional()))
      {
        final Object value;
        if (spec.isUseDefaults() && field.getDefault() != null)
        {
          value = field.getDefault();
        }
        else
        {
          value = buildData(parentSchemas, field.getType(), field.getName(), spec);
        }
        // null is returned for NULL Pegasus type (used in unions, primarily)
        if (value == null)
        {
          data.remove(field.getName());
        }
        else
        {
          data.put(field.getName(), value);
        }
      }
    }
  }
  else if (schema instanceof TyperefDataSchema)
  {
    data.put("ref", buildData(parentSchemas, schema.getDereferencedDataSchema(), spec));
  }
  else
  {
    data.put("value", buildData(parentSchemas, schema, spec));
  }
  parentSchemas.decrementReferences(schema);
  return data;
}
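
For a typeref, the branch above recurses into the de-referenced schema and stores the generated sample under the "ref" key. The following is a hand-built sketch of the resulting DataMap shape rather than a call into the generator; the UnixTimestamp schema text is hypothetical and 1234L stands in for whatever sample value buildData would produce:

import com.linkedin.data.DataMap;
import com.linkedin.data.schema.TyperefDataSchema;
import com.linkedin.data.template.DataTemplateUtil;

public class TyperefSampleShapeSketch
{
  public static void main(String[] args)
  {
    TyperefDataSchema typeref = (TyperefDataSchema) DataTemplateUtil.parseSchema(
        "{ \"type\" : \"typeref\", \"name\" : \"UnixTimestamp\", \"ref\" : \"long\" }");

    // buildRecordData() would call buildData() on the de-referenced schema (a long here)
    // and put the result under "ref"; 1234L is a stand-in for the generated sample.
    DataMap sample = new DataMap();
    sample.put("ref", 1234L);

    System.out.println(typeref.getDereferencedType());  // LONG
    System.out.println(sample);                         // prints the map with its single "ref" entry
  }
}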
Use of com.linkedin.data.schema.TyperefDataSchema in project rest.li by linkedin.
The class PdlSchemaParser, method parseTyperef:
private TyperefDataSchema parseTyperef(NamedTypeDeclarationContext context, TyperefDeclarationContext typeref)
    throws ParseException
{
  Name name = toName(typeref.name);
  TyperefDataSchema schema = new TyperefDataSchema(name);
  getResolver().addPendingSchema(schema.getFullName());
  try
  {
    setDocAndProperties(context, schema);
    bindNameToSchema(name, schema.getAliases(), schema);
    DataSchema refSchema = toDataSchema(typeref.ref);
    checkTyperefCycle(schema, refSchema);
    schema.setReferencedType(refSchema);
    schema.setRefDeclaredInline(isDeclaredInline(typeref.ref));
  }
  finally
  {
    getResolver().removePendingSchema(schema.getFullName());
  }
  return schema;
}
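
The parser registers the typeref as pending while resolving its referenced type so that checkTyperefCycle can reject typeref chains that loop back on themselves. As a hedged sketch of what a successfully parsed typeref exposes once setDocAndProperties and setReferencedType have run, the snippet below uses the PDSC (JSON) form through DataTemplateUtil.parseSchema instead of the PDL grammar, with a made-up name, doc string, and custom property:

import com.linkedin.data.schema.TyperefDataSchema;
import com.linkedin.data.template.DataTemplateUtil;

public class ParsedTyperefSketch
{
  public static void main(String[] args)
  {
    // Hypothetical typeref carrying a doc string and a custom property.
    TyperefDataSchema schema = (TyperefDataSchema) DataTemplateUtil.parseSchema(
        "{ \"type\" : \"typeref\", \"name\" : \"com.example.UnixTimestamp\", \"ref\" : \"long\","
            + " \"doc\" : \"Milliseconds since the epoch.\", \"customAnnotation\" : \"example\" }");

    System.out.println(schema.getFullName());          // com.example.UnixTimestamp
    System.out.println(schema.getDoc());               // Milliseconds since the epoch.
    System.out.println(schema.getProperties());        // includes the customAnnotation entry
    System.out.println(schema.getDereferencedType());  // LONG
  }
}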
Use of com.linkedin.data.schema.TyperefDataSchema in project rest.li by linkedin.
The class DynamicRecordTemplate, method putArray:
/**
 * Puts an array field value by doing the necessary unwrapping at the items level.
 *
 * @param field specifies the field to put the value for.
 * @param fieldDef specifies the field definition to put the value for.
 * @param value provides the value to put for the specified field.
 * @param <T> provides the type of the value.
 */
@SuppressWarnings({ "unchecked" })
private <T> void putArray(RecordDataSchema.Field field, FieldDef<T> fieldDef, T value)
{
  DataList data = new DataList();
  Class<?> itemType = null;
  ArrayDataSchema arrayDataSchema = null;
  if (fieldDef.getDataSchema() instanceof ArrayDataSchema)
  {
    arrayDataSchema = (ArrayDataSchema) fieldDef.getDataSchema();
    DataSchema itemSchema = arrayDataSchema.getItems();
    if (itemSchema instanceof TyperefDataSchema)
    {
      itemType = DataSchemaUtil.dataSchemaTypeToPrimitiveDataSchemaClass(itemSchema.getDereferencedType());
    }
    else
    {
      itemType = fieldDef.getType().getComponentType();
    }
  }
  else
  {
    throw new IllegalArgumentException(
        "Field " + fieldDef.getName() + " does not have an array schema; although the data is an array.");
  }

  boolean isDataTemplate = DataTemplate.class.isAssignableFrom(itemType);
  List<Object> items;
  if (value instanceof DataList)
  {
    items = (List<Object>) value;
  }
  else
  {
    items = Arrays.asList((Object[]) value);
  }

  for (Object item : items)
  {
    if (isDataTemplate)
    {
      Object itemData;
      if (item instanceof DataMap)
      {
        itemData = item;
      }
      else
      {
        itemData = ((DataTemplate) item).data();
      }
      data.add(itemData);
    }
    else
    {
      data.add(DataTemplateUtil.coerceInput(item, (Class<Object>) item.getClass(),
          itemType.isEnum() ? String.class : itemType));
    }
  }

  putDirect(field, DataList.class, data, SetMode.DISALLOW_NULL);
}
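
When the array's item schema is a typeref, putArray resolves the coercion target from the de-referenced schema type rather than from the Java array's component type. The following is a sketch of just that resolution step, assuming a hypothetical array of a UnixTimestamp typeref of long and the same DataSchemaUtil helper the method above uses:

import com.linkedin.data.schema.ArrayDataSchema;
import com.linkedin.data.schema.DataSchema;
import com.linkedin.data.schema.DataSchemaUtil;
import com.linkedin.data.schema.TyperefDataSchema;
import com.linkedin.data.template.DataTemplateUtil;

public class TyperefArrayItemTypeSketch
{
  public static void main(String[] args)
  {
    ArrayDataSchema arraySchema = (ArrayDataSchema) DataTemplateUtil.parseSchema(
        "{ \"type\" : \"array\", \"items\" : "
            + "{ \"type\" : \"typeref\", \"name\" : \"UnixTimestamp\", \"ref\" : \"long\" } }");

    DataSchema itemSchema = arraySchema.getItems();
    Class<?> itemType;
    if (itemSchema instanceof TyperefDataSchema)
    {
      // Same resolution putArray() performs: map the de-referenced schema type to its primitive Java class.
      itemType = DataSchemaUtil.dataSchemaTypeToPrimitiveDataSchemaClass(itemSchema.getDereferencedType());
    }
    else
    {
      itemType = Long.class;  // stand-in for fieldDef.getType().getComponentType()
    }
    System.out.println(itemType);  // expected: class java.lang.Long
  }
}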