Use of com.linkedin.data.schema.RecordDataSchema in project rest.li by LinkedIn.
From the class PdlSchemaParser, method parseIncludes.
/**
 * Resolves the schemas referenced by an {@code includes} declaration and collects the
 * record fields they contribute.
 *
 * For each referenced type: it must resolve, be a named schema, and dereference (through
 * any typerefs) to a record; otherwise an error message is accumulated. An empty result
 * is returned when no include set is present.
 *
 * @param includeSet parsed include list from the grammar; may be null when the record
 *                   declares no includes
 * @return the included fields, the included named schemas, and the subset declared inline
 * @throws ParseException declared for parser consistency
 */
private FieldsAndIncludes parseIncludes(PdlParser.FieldIncludesContext includeSet) throws ParseException {
List<NamedDataSchema> includes = new ArrayList<>();
Set<NamedDataSchema> includesDeclaredInline = new HashSet<>();
List<Field> fields = new ArrayList<>();
if (includeSet == null) {
return new FieldsAndIncludes(fields, includes, includesDeclaredInline);
}
for (TypeAssignmentContext includeRef : includeSet.typeAssignment()) {
DataSchema resolvedSchema = toDataSchema(includeRef);
if (resolvedSchema == null) {
startErrorMessage(includeRef).append("Unable to resolve included schema: ").append(includeRef).append(NEWLINE);
continue;
}
DataSchema dereferenced = resolvedSchema.getDereferencedDataSchema();
// Only named schemas that (after typeref resolution) are records may be included.
if (!(resolvedSchema instanceof NamedDataSchema) || !(dereferenced instanceof RecordDataSchema)) {
startErrorMessage(includeRef).append("Include is not a record type or a typeref to a record type: ").append(includeRef).append(NEWLINE);
continue;
}
NamedDataSchema namedInclude = (NamedDataSchema) resolvedSchema;
fields.addAll(((RecordDataSchema) dereferenced).getFields());
includes.add(namedInclude);
if (isDeclaredInline(includeRef)) {
includesDeclaredInline.add(namedInclude);
}
}
return new FieldsAndIncludes(fields, includes, includesDeclaredInline);
}
Use of com.linkedin.data.schema.RecordDataSchema in project rest.li by LinkedIn.
From the class AvroSchemaGenerator, method targetFiles.
/**
 * Determines the Avro schema files to generate under {@code targetDirectory} and caches
 * each file's translated Avro schema JSON in {@code _fileToAvroSchemaMap}.
 *
 * Only record schemas whose location or full name was among the requested sources are
 * translated; other bindings are skipped.
 *
 * @param targetDirectory root directory for generated Avro schema files
 * @return the files that will be written (one per translated record schema)
 */
protected List<File> targetFiles(File targetDirectory) {
ArrayList<File> generatedFiles = new ArrayList<>();
DataSchemaResolver resolver = getSchemaResolver();
Map<String, NamedDataSchema> schemasByName = resolver.bindings();
for (Map.Entry<String, DataSchemaLocation> entry : resolver.nameToDataSchemaLocations().entrySet()) {
String fullName = entry.getKey();
DataSchemaLocation location = entry.getValue();
// Skip schemas that were neither requested by location nor by name.
if (!_sourceLocations.contains(location) && !_sources.contains(fullName)) {
continue;
}
NamedDataSchema schema = schemasByName.get(fullName);
if (!(schema instanceof RecordDataSchema)) {
continue;
}
RecordDataSchema recordSchema = (RecordDataSchema) schema;
File generatedFile = fileForAvroSchema(fullName, targetDirectory);
generatedFiles.add(generatedFile);
// Translation must not mutate the source schema; compare its text before and after.
String preTranslateSchemaText = recordSchema.toString();
String avroSchemaText = SchemaTranslator.dataToAvroSchemaJson(recordSchema, _options);
_fileToAvroSchemaMap.put(generatedFile, avroSchemaText);
assert (preTranslateSchemaText.equals(recordSchema.toString()));
}
return generatedFiles;
}
Use of com.linkedin.data.schema.RecordDataSchema in project rest.li by LinkedIn.
From the class AbstractDefaultDataTranslator, method translate.
/**
 * Recursively translates {@code value} to conform to {@code dataSchema}, dispatching on
 * the dereferenced (typeref-resolved) schema type.
 *
 * Scalars are type-checked via casts (a mismatched value raises ClassCastException);
 * containers (map, array, record) are rebuilt with each element translated recursively;
 * unions are delegated to {@code translateUnion}.
 *
 * @param path path to the value being translated, used in error messages; elements are
 *             pushed before each recursive call and popped afterwards, so the list is
 *             restored on normal return
 * @param value the value to translate
 * @param dataSchema schema the value must conform to; typerefs are dereferenced first
 * @return the translated value
 * @throws IllegalArgumentException if the value does not conform to the schema
 */
protected Object translate(List<Object> path, Object value, DataSchema dataSchema) {
// Resolve typeref chains so the switch sees the concrete schema type.
dataSchema = dataSchema.getDereferencedDataSchema();
DataSchema.Type type = dataSchema.getType();
Object result;
switch(type) {
case NULL:
// Data.NULL is the canonical null sentinel; any other value is invalid here.
if (value != Data.NULL) {
throw new IllegalArgumentException(message(path, "value must be null for null schema"));
}
result = value;
break;
case BOOLEAN:
// The cast doubles as validation: a non-Boolean value throws ClassCastException.
result = ((Boolean) value).booleanValue();
break;
case INT:
// Numeric cases accept any Number and narrow/widen to the schema's primitive type.
result = ((Number) value).intValue();
break;
case LONG:
result = ((Number) value).longValue();
break;
case FLOAT:
result = ((Number) value).floatValue();
break;
case DOUBLE:
result = ((Number) value).doubleValue();
break;
case STRING:
result = (String) value;
break;
case BYTES:
// Bytes may be represented either as an Avro-style String or as a ByteString.
Class<?> clazz = value.getClass();
if (clazz != String.class && clazz != ByteString.class) {
throw new IllegalArgumentException(message(path, "bytes value %1$s is not a String or ByteString", value));
}
result = value;
break;
case FIXED:
// Note: 'clazz' was declared in the BYTES case above; switch cases share one scope.
clazz = value.getClass();
ByteString byteString;
if (clazz == String.class) {
// Avro-encoded string form; copyAvroString validates the encoding.
byteString = ByteString.copyAvroString((String) value, true);
} else if (clazz == ByteString.class) {
byteString = (ByteString) value;
} else {
throw new IllegalArgumentException(message(path, "fixed value %1$s is not a String or ByteString", value));
}
FixedDataSchema fixedDataSchema = (FixedDataSchema) dataSchema;
// Fixed schemas require an exact byte length.
if (fixedDataSchema.getSize() != byteString.length()) {
throw new IllegalArgumentException(message(path, "ByteString size %1$d != FixedDataSchema size %2$d", byteString.length(), fixedDataSchema.getSize()));
}
result = byteString;
break;
case MAP:
DataMap map = (DataMap) value;
DataSchema valueDataSchema = ((MapDataSchema) dataSchema).getValues();
// Pre-sized; the *2 factor presumably compensates for the load factor — TODO confirm.
Map<String, Object> resultMap = new DataMap(map.size() * 2);
for (Map.Entry<String, Object> entry : map.entrySet()) {
String key = entry.getKey();
// Push the key onto the path for error reporting, pop it after the recursive call.
path.add(key);
Object entryAvroValue = translate(path, entry.getValue(), valueDataSchema);
path.remove(path.size() - 1);
resultMap.put(key, entryAvroValue);
}
result = resultMap;
break;
case ARRAY:
DataList list = (DataList) value;
DataList resultList = new DataList(list.size());
DataSchema elementDataSchema = ((ArrayDataSchema) dataSchema).getItems();
for (int i = 0; i < list.size(); i++) {
// Push the element index onto the path for error reporting.
path.add(i);
Object entryAvroValue = translate(path, list.get(i), elementDataSchema);
path.remove(path.size() - 1);
resultList.add(entryAvroValue);
}
result = resultList;
break;
case RECORD:
DataMap recordMap = (DataMap) value;
RecordDataSchema recordDataSchema = (RecordDataSchema) dataSchema;
DataMap resultRecordMap = new DataMap(recordDataSchema.getFields().size() * 2);
// Iterate schema fields (not map keys): unknown keys in recordMap are dropped.
for (RecordDataSchema.Field field : recordDataSchema.getFields()) {
String fieldName = field.getName();
Object fieldValue = recordMap.get(fieldName);
path.add(fieldName);
Object resultFieldValue = translateField(path, fieldValue, field);
path.remove(path.size() - 1);
// A null result appears to mean "omit this field" — translateField decides.
if (resultFieldValue != null) {
resultRecordMap.put(fieldName, resultFieldValue);
}
}
result = resultRecordMap;
break;
case UNION:
// Union handling is subclass-specific.
result = translateUnion(path, value, (UnionDataSchema) dataSchema);
break;
default:
throw new IllegalStateException(message(path, "schema type unknown %1$s", type));
}
return result;
}
Use of com.linkedin.data.schema.RecordDataSchema in project rest.li by LinkedIn.
From the class DefaultDataToAvroConvertCallback, method callback.
/**
 * Visits a schema during traversal and pre-computes Avro default-value overrides for
 * every field of each record schema encountered.
 *
 * Non-record schemas are ignored, as are records carrying an "avro" property (those are
 * handled in AvroOverrideFactory#createFromDataSchema() while encoding the Avro schema).
 * Fields that already have an override are left untouched.
 *
 * @param path traversal path to the schema; temporarily extended while translating a
 *             field's default and restored afterwards
 * @param schema the schema being visited
 */
@Override
public void callback(List<String> path, DataSchema schema) {
if (schema.getType() != DataSchema.Type.RECORD) {
return;
}
// These are handled in AvroOverrideFactory#createFromDataSchema() while encoding the Avro schema.
if (schema.getProperties().get("avro") != null) {
return;
}
RecordDataSchema recordSchema = (RecordDataSchema) schema;
for (RecordDataSchema.Field field : recordSchema.getFields()) {
if (_defaultValueOverrides.get(field) != null) {
continue; // override already computed for this field
}
Object defaultData = field.getDefault();
if (defaultData == null) {
if (field.getOptional()) {
// No default specified and the field is optional: record a null default.
_defaultValueOverrides.put(field, FieldOverride.NULL_DEFAULT_VALUE);
}
continue;
}
if (_options.getDefaultFieldTranslationMode() == PegasusToAvroDefaultFieldTranslationMode.DO_NOT_TRANSLATE) {
// Default translation is suppressed; set the override to NULL as well.
_defaultValueOverrides.put(field, FieldOverride.NULL_DEFAULT_VALUE);
continue;
}
path.add(field.getName());
// _newDefaultSchema is a side channel populated during translateField.
_newDefaultSchema = null;
Object newDefault = translateField(pathList(path), defaultData, field);
_defaultValueOverrides.put(field, new FieldOverride(_newDefaultSchema, newDefault));
path.remove(path.size() - 1);
}
}
Use of com.linkedin.data.schema.RecordDataSchema in project rest.li by LinkedIn.
From the class DefaultAvroToDataConvertCallback, method callback.
/**
 * Visits a schema during traversal and, for each record schema, rewrites every field's
 * default value in place from its Avro form to its translated form.
 *
 * @param path traversal path to the schema; temporarily extended with the default key
 *             while translating and restored afterwards
 * @param schema the schema being visited; non-record schemas are ignored
 */
@Override
public void callback(List<String> path, DataSchema schema) {
if (schema.getType() != DataSchema.Type.RECORD) {
return;
}
for (RecordDataSchema.Field field : ((RecordDataSchema) schema).getFields()) {
Object defaultData = field.getDefault();
if (defaultData == null) {
continue; // no default to translate
}
path.add(DataSchemaConstants.DEFAULT_KEY);
Object newDefault = translateField(pathList(path), defaultData, field);
path.remove(path.size() - 1);
// Mutates the schema: the translated default replaces the original.
field.setDefault(newDefault);
}
}
Aggregations