Use of com.linkedin.data.schema.MapDataSchema in project rest.li by linkedin.
The class CompatibilityChecker, method check.
private void check(DataSchema older, DataSchema newer) {
  // Memoize the (older, newer) pair so recursive or self-referencing schemas are checked only once.
  Checked toCheck = new Checked(older, newer);
  if (_checked.contains(toCheck)) {
    return;
  }
  _checked.add(toCheck);
  if (older == newer) {
    return;
  }
  int pathCount = 1;
  if (_options.getMode() == CompatibilityOptions.Mode.DATA) {
    // In DATA mode typerefs are transparent: dereference the older schema and unwind
    // any typeref chain on the newer schema, recording each hop in the path.
    older = older.getDereferencedDataSchema();
    while (newer.getType() == DataSchema.Type.TYPEREF) {
      TyperefDataSchema typerefDataSchema = ((TyperefDataSchema) newer);
      _path.addLast(typerefDataSchema.getFullName());
      _path.addLast(DataSchemaConstants.REF_KEY);
      pathCount++;
      newer = typerefDataSchema.getRef();
    }
  }
  if (newer.getType() == DataSchema.Type.TYPEREF) {
    _path.addLast(((TyperefDataSchema) newer).getFullName());
  } else {
    _path.addLast(newer.getUnionMemberKey());
  }
  // Dispatch to the type-specific check; MAP schemas are compared with checkMap.
  switch (newer.getType()) {
    case TYPEREF:
      if (isSameType(older, newer))
        checkTyperef((TyperefDataSchema) older, (TyperefDataSchema) newer);
      break;
    case RECORD:
      if (isSameType(older, newer))
        checkRecord((RecordDataSchema) older, (RecordDataSchema) newer);
      break;
    case ARRAY:
      if (isSameType(older, newer))
        checkArray((ArrayDataSchema) older, (ArrayDataSchema) newer);
      break;
    case MAP:
      if (isSameType(older, newer))
        checkMap((MapDataSchema) older, (MapDataSchema) newer);
      break;
    case ENUM:
      if (isSameType(older, newer))
        checkEnum((EnumDataSchema) older, (EnumDataSchema) newer);
      break;
    case FIXED:
      if (isSameType(older, newer))
        checkFixed((FixedDataSchema) older, (FixedDataSchema) newer);
      break;
    case UNION:
      if (isSameType(older, newer))
        checkUnion((UnionDataSchema) older, (UnionDataSchema) newer);
      break;
    default:
      if (newer instanceof PrimitiveDataSchema)
        checkPrimitive(older, newer);
      else
        throw new IllegalStateException("Unknown schema type " + newer.getType() + ", checking old schema " + older + ", new schema " + newer);
      break;
  }
  // Pop every path segment pushed above (one per typeref hop plus the final segment).
  for (; pathCount > 0; pathCount--) {
    _path.removeLast();
  }
  return;
}
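For context, a minimal sketch of how this private check is typically reached from the library's public entry point: two map schemas are parsed and handed to CompatibilityChecker.checkCompatibility, which walks them and collects incompatibility messages. The option value and the printing loop are illustrative assumptions, and the checkCompatibility/getMessages signatures are recalled from the compatibility API rather than taken from the snippet above, so verify them against your rest.li version.

import com.linkedin.data.schema.DataSchema;
import com.linkedin.data.schema.compatibility.CompatibilityChecker;
import com.linkedin.data.schema.compatibility.CompatibilityOptions;
import com.linkedin.data.schema.compatibility.CompatibilityResult;
import com.linkedin.data.template.DataTemplateUtil;

public class MapCompatibilityExample {
  public static void main(String[] args) {
    // Two map schemas that differ only in their value type.
    DataSchema older = DataTemplateUtil.parseSchema("{ \"type\" : \"map\", \"values\" : \"int\" }");
    DataSchema newer = DataTemplateUtil.parseSchema("{ \"type\" : \"map\", \"values\" : \"long\" }");
    // DATA mode dereferences typerefs before comparing, as shown in check() above.
    CompatibilityOptions options = new CompatibilityOptions();
    options.setMode(CompatibilityOptions.Mode.DATA);
    // Assumed public entry point that eventually drives the private check(older, newer).
    CompatibilityResult result = CompatibilityChecker.checkCompatibility(older, newer, options);
    // Each message describes one incompatibility found while recursing through the schemas.
    for (Object message : result.getMessages()) {
      System.out.println(message);
    }
  }
}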
Use of com.linkedin.data.schema.MapDataSchema in project rest.li by linkedin.
The class SchemaSampleDataGenerator, method buildData.
private static Object buildData(ParentSchemas parentSchemas, DataSchema schema, String fieldName, DataGenerationOptions spec) {
  spec = preventRecursionIntoAlreadyTraversedSchemas(parentSchemas, spec, schema);
  parentSchemas.incrementReferences(schema);
  // Resolve typerefs so the switch below sees the underlying schema type.
  final DataSchema derefSchema = schema.getDereferencedDataSchema();
  final SampleDataCallback callback = spec.getCallback();
  Object data = null;
  switch (derefSchema.getType()) {
    case BOOLEAN:
      data = callback.getBoolean(fieldName);
      break;
    case INT:
      data = callback.getInteger(fieldName);
      break;
    case LONG:
      data = callback.getLong(fieldName);
      break;
    case FLOAT:
      data = callback.getFloat(fieldName);
      break;
    case DOUBLE:
      data = callback.getDouble(fieldName);
      break;
    case BYTES:
      data = callback.getBytes(fieldName);
      break;
    case STRING:
      data = callback.getString(fieldName);
      break;
    case NULL:
      data = Data.NULL;
      break;
    case FIXED:
      data = callback.getFixed(fieldName, (FixedDataSchema) derefSchema);
      break;
    case ENUM:
      data = callback.getEnum(fieldName, (EnumDataSchema) derefSchema);
      break;
    case ARRAY:
      final DataList dataList = new DataList(spec.getArraySize());
      for (int i = 0; i < spec.getArraySize(); i++) {
        final Object item = buildData(parentSchemas, ((ArrayDataSchema) derefSchema).getItems(), fieldName, spec);
        dataList.add(item);
      }
      data = dataList;
      break;
    case RECORD:
      data = buildRecordData(parentSchemas, (RecordDataSchema) derefSchema, spec);
      break;
    case MAP:
      // For a map schema, build spec.getArraySize() entries; each value is generated
      // recursively from the MapDataSchema's value schema, under a randomized key.
      final DataMap dataMap = new DataMap();
      for (int i = 0; i < spec.getArraySize(); i++) {
        final Object item = buildData(parentSchemas, ((MapDataSchema) derefSchema).getValues(), fieldName, spec);
        dataMap.put("mapField_" + _random.nextInt(), item);
      }
      data = dataMap;
      break;
    case UNION:
      // Pick a random member that has not already been traversed and wrap the
      // generated value under its union member key.
      final UnionDataSchema unionSchema = (UnionDataSchema) derefSchema;
      final List<DataSchema> types = removeAlreadyTraversedSchemasFromUnionMemberList(parentSchemas, unionSchema.getTypes());
      final int unionIndex = _random.nextInt(types.size());
      final DataSchema unionItemSchema = types.get(unionIndex);
      data = buildData(parentSchemas, unionItemSchema, fieldName, spec);
      if (data != null) {
        final DataMap unionMap = new DataMap();
        unionMap.put(unionItemSchema.getUnionMemberKey(), data);
        data = unionMap;
      }
      break;
    case TYPEREF:
      data = buildData(parentSchemas, derefSchema, fieldName, spec);
      break;
  }
  parentSchemas.decrementReferences(schema);
  return data;
}
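The MAP branch above is the part that touches MapDataSchema: it fills a DataMap whose values follow the schema returned by getValues(). A minimal standalone sketch of that shape, with hypothetical class and method names and a placeholder standing in for the recursive buildData call, might look like this.

import com.linkedin.data.DataMap;
import com.linkedin.data.schema.MapDataSchema;
import com.linkedin.data.template.DataTemplateUtil;

public class MapSampleSketch {
  // Fills a DataMap with one entry per iteration, mirroring the MAP case of buildData.
  public static DataMap sampleMap(MapDataSchema schema, int entries) {
    DataMap dataMap = new DataMap();
    for (int i = 0; i < entries; i++) {
      // A real generator would recurse on schema.getValues(); a placeholder value stands in here.
      Object item = "sample_" + schema.getValues().getType() + "_" + i;
      dataMap.put("mapField_" + i, item);
    }
    return dataMap;
  }

  public static void main(String[] args) {
    MapDataSchema schema = (MapDataSchema) DataTemplateUtil.parseSchema("{ \"type\" : \"map\", \"values\" : \"string\" }");
    System.out.println(sampleMap(schema, 3));
  }
}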
Use of com.linkedin.data.schema.MapDataSchema in project rest.li by linkedin.
The class PdlSchemaParser, method parseMap.
private MapDataSchema parseMap(MapDeclarationContext map) throws ParseException {
  TypeAssignmentContext keyType = map.typeParams.key;
  TypeAssignmentContext valueType = map.typeParams.value;
  // The value type becomes the MapDataSchema's value schema; map keys must be strings.
  MapDataSchema schema = new MapDataSchema(toDataSchema(valueType));
  Map<String, Object> propsToAdd = new HashMap<String, Object>();
  if (keyType.typeReference() != null) {
    String typeName = keyType.typeReference().value;
    if (!typeName.equals("string")) {
      startErrorMessage(map).append("Unsupported map key type: ").append(typeName).append(". 'string' is the only currently supported map key type.\n");
      // TODO(jbetz):
      // Support typed map keys once https://github.com/linkedin/rest.li/pull/61 is accepted.
      //String qualifiedKeyName = computeFullName(typeName);
      //propsToAdd.put("keys", qualifiedKeyName);
    }
  } else if (keyType.typeDeclaration() != null) {
    DataSchema keySchema = parseType(keyType.typeDeclaration());
    String json = SchemaToJsonEncoder.schemaToJson(keySchema, JsonBuilder.Pretty.COMPACT);
    startErrorMessage(map).append("Unsupported map key type declaration: ").append(json).append(". 'string' is the only currently supported map key type.\n");
    // TODO(jbetz):
    // Support typed map keys once https://github.com/linkedin/rest.li/pull/61 is accepted.
    //DataMap dataMap = codec.stringToMap(json);
    //propsToAdd.put("keys", dataMap);
  }
  schema.setProperties(propsToAdd);
  schema.setValuesDeclaredInline(isDeclaredInline(valueType));
  return schema;
}
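The declaration this method handles is written as map[string, long] (or similar) in PDL source. A minimal sketch of constructing the equivalent MapDataSchema directly, assuming the primitive schema constants on DataSchemaConstants: only the value schema is passed to the constructor, and the key type is implicitly string.

import com.linkedin.data.schema.DataSchemaConstants;
import com.linkedin.data.schema.MapDataSchema;

public class MapSchemaConstructionSketch {
  public static void main(String[] args) {
    // Roughly equivalent to the PDL declaration map[string, long]; only the value schema is stored.
    MapDataSchema schema = new MapDataSchema(DataSchemaConstants.LONG_DATA_SCHEMA);
    System.out.println(schema.getValues().getType()); // LONG
  }
}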
Use of com.linkedin.data.schema.MapDataSchema in project rest.li by linkedin.
The class TestMapTemplate, method testBooleanMap.
@Test
public void testBooleanMap() {
  MapDataSchema schema = (MapDataSchema) DataTemplateUtil.parseSchema("{ \"type\" : \"map\", \"values\" : \"boolean\" }");
  Map<String, Boolean> input = asMap("true", true, "false", false);
  Map<String, Boolean> adds = asMap("thirteen", true, "seventeen", false, "nineteen", true);
  Map<String, Object> badInput = asMap("integer", 99, "long", 999L, "float", 88.0f, "double", 888.0, "string", "hello", "bytes", ByteString.empty(), "object", new Object(), "null", null, "array", new StringArray(), "record", new FooRecord());
  Map<String, Object> badOutput = asMap("integer", 99, "long", 999L, "float", 88.0f, "double", 888.0, "string", "hello", "bytes", ByteString.empty(), "map", new DataMap(), "list", new DataList());
  testMap(BooleanMap.class, schema, input, adds);
  testMapBadInput(BooleanMap.class, schema, input, badInput, badOutput);
}
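For readers unfamiliar with the wrapper the test exercises, a short hedged sketch of BooleanMap in isolation: it behaves as a java.util.Map<String, Boolean> backed by a DataMap that conforms to the map-of-boolean schema above.

import com.linkedin.data.template.BooleanMap;

public class BooleanMapSketch {
  public static void main(String[] args) {
    BooleanMap map = new BooleanMap();
    map.put("true", Boolean.TRUE);
    map.put("false", Boolean.FALSE);
    // data() exposes the underlying DataMap that is validated against the map schema.
    System.out.println(map.data());
  }
}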
Use of com.linkedin.data.schema.MapDataSchema in project rest.li by linkedin.
The class TestMapTemplate, method testLongMap.
@Test
public void testLongMap() {
  MapDataSchema schema = (MapDataSchema) DataTemplateUtil.parseSchema("{ \"type\" : \"map\", \"values\" : \"long\" }");
  Map<String, Long> input = asMap("one", 1L, "three", 3L, "five", 5L, "seven", 7L, "eleven", 11L);
  Map<String, Long> adds = asMap("thirteen", 13L, "seventeen", 17L, "nineteen", 19L);
  Map<String, Object> badInput = asMap("boolean", true, "string", "hello", "bytes", ByteString.empty(), "object", new Object(), "null", null, "array", new StringArray(), "record", new FooRecord());
  Map<String, Object> badOutput = asMap("boolean", true, "string", "hello", "bytes", ByteString.empty(), "map", new DataMap(), "list", new DataList());
  testMap(LongMap.class, schema, input, adds);
  testMapBadInput(LongMap.class, schema, input, badInput, badOutput);
  Map<String, ? extends Number> castFrom = asMap("one", 1, "three", 3.0f, "five", 5.0, "seven", 7, "eleven", 11);
  testNumberMap(LongMap.class, schema, input, castFrom);
}
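The castFrom map feeds testNumberMap, which exercises numeric coercion: raw Integer, Float, and Double values in the underlying DataMap are read back as Long through the typed wrapper. A minimal sketch of that behavior, under the assumption that LongMap coerces numeric values on read.

import com.linkedin.data.DataMap;
import com.linkedin.data.template.LongMap;

public class LongMapCoercionSketch {
  public static void main(String[] args) {
    DataMap raw = new DataMap();
    raw.put("one", 1); // stored as an Integer in the untyped DataMap
    LongMap map = new LongMap(raw);
    Long value = map.get("one"); // assumed to be coerced to Long when read through the typed wrapper
    System.out.println(value);
  }
}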