Usage example of com.linkedin.data.schema.DataSchemaResolver in the rest.li project (LinkedIn):
the schemaFromName method of the AnyRecordValidator class.
/**
 * Resolves a named schema through the resolver configured on the validator parameter,
 * recording a validation message when resolution is not possible.
 *
 * @param context validation context used to report messages and locate the current data element
 * @param schemaName fully qualified name of the schema to look up
 * @return the resolved schema, or {@code null} when no resolver is configured or the lookup fails
 */
protected DataSchema schemaFromName(ValidatorContext context, String schemaName) {
Parameter parameter = getParameter(context.validationOptions());
DataSchemaResolver resolver = parameter.resolver();
// Guard: without a resolver there is nothing to look up — report and return null.
if (resolver == null) {
context.addResult(new Message(context.dataElement().path(schemaName), parameter.isValidSchema(), "%1$s cannot obtain schema for \"%2$s\", no resolver", AnyRecordValidator.class.getName(), schemaName));
return null;
}
// The resolver appends failure details into this buffer.
StringBuilder resolutionErrors = new StringBuilder();
NamedDataSchema resolved = resolver.findDataSchema(schemaName, resolutionErrors);
if (resolved == null) {
context.addResult(new Message(context.dataElement().path(schemaName), parameter.isValidSchema(), "%1$s cannot obtain schema for \"%2$s\" (%3$s)", AnyRecordValidator.class.getName(), schemaName, resolutionErrors.toString()));
}
return resolved;
}
Usage example of com.linkedin.data.schema.DataSchemaResolver in the rest.li project (LinkedIn):
the check method of the RestLiResourceModelCompatibilityChecker class.
/**
 * Check backwards compatibility between two idl (.restspec.json) files.
 *
 * @param prevRestspecPath previously existing idl file
 * @param currRestspecPath current idl file
 * @param compatLevel compatibility level which affects the return value
 * @return true if the check result conforms the compatibility level requirement
 *         e.g. false if backwards compatible changes are found but the level is equivalent
 */
public boolean check(String prevRestspecPath, String currRestspecPath, CompatibilityLevel compatLevel) {
_prevRestspecPath = prevRestspecPath;
_currRestspecPath = currRestspecPath;
Stack<Object> path = new Stack<>();
path.push("");
ResourceSchema prevRec = null;
ResourceSchema currRec = null;
// try-with-resources: the original leaked both FileInputStreams on every call.
// A missing previous file means the resource is new (not an error at this stage).
try (FileInputStream prevIn = new FileInputStream(prevRestspecPath)) {
prevRec = _codec.readResourceSchema(prevIn);
} catch (FileNotFoundException e) {
_infoMap.addRestSpecInfo(CompatibilityInfo.Type.RESOURCE_NEW, path, currRestspecPath);
} catch (IOException e) {
_infoMap.addRestSpecInfo(CompatibilityInfo.Type.OTHER_ERROR, path, e.getMessage());
}
// A missing current file means the resource was removed.
// NOTE(review): this branch catches Exception (broader than the previous one's
// IOException) — preserved as-is since narrowing could change reported results.
try (FileInputStream currIn = new FileInputStream(currRestspecPath)) {
currRec = _codec.readResourceSchema(currIn);
} catch (FileNotFoundException e) {
_infoMap.addRestSpecInfo(CompatibilityInfo.Type.RESOURCE_MISSING, path, prevRestspecPath);
} catch (Exception e) {
_infoMap.addRestSpecInfo(CompatibilityInfo.Type.OTHER_ERROR, path, e.getMessage());
}
// If either side failed to load, the info map already explains why; let the
// compatibility level decide whether that is acceptable.
if (prevRec == null || currRec == null) {
return _infoMap.isCompatible(compatLevel);
}
final DataSchemaResolver resolver;
if (_resolverPath == null) {
resolver = new DefaultDataSchemaResolver();
} else {
resolver = MultiFormatDataSchemaResolver.withBuiltinFormats(_resolverPath);
}
ResourceCompatibilityChecker checker = new ResourceCompatibilityChecker(prevRec, resolver, currRec, resolver);
boolean check = checker.check(compatLevel);
_infoMap.addAll(checker.getInfoMap());
return check;
}
Usage example of com.linkedin.data.schema.DataSchemaResolver in the rest.li project (LinkedIn):
the createResolverFromSnapshot method of the RestLiSnapshotCompatibilityChecker class.
/**
 * Builds a schema resolver for the given resolver path and pre-binds every named
 * model captured in the snapshot, so those schemas resolve without further lookup.
 *
 * @param snapshot snapshot whose models are bound into the resolver
 * @param resolverPath path used to construct the underlying resolver
 * @return a resolver seeded with all of the snapshot's named schemas
 */
private static DataSchemaResolver createResolverFromSnapshot(AbstractSnapshot snapshot, String resolverPath) {
final DataSchemaResolver resolver = CompatibilityUtil.getDataSchemaResolver(resolverPath);
// Bind each model under its fully qualified name; snapshots carry no source location.
snapshot.getModels().forEach((fullName, namedSchema) ->
resolver.bindNameToSchema(new Name(fullName), namedSchema, DataSchemaLocation.NO_LOCATION));
return resolver;
}
Usage example of com.linkedin.data.schema.DataSchemaResolver in the rest.li project (LinkedIn):
the parseAndValidateExtensionSchemas method of the ExtensionSchemaValidationCmdLineApp class.
/**
 * Parses every .pdl file under {@code inputDir} (recursively) and validates that each one
 * is a well-formed extension schema: a single named record whose name is
 * {@code <baseSchemaName> + "Extensions"} and which includes exactly its base schema.
 *
 * @param resolverPath schema resolver path used to resolve referenced schemas
 * @param inputDir directory scanned recursively for .pdl extension schema files
 * @throws IOException if a schema file cannot be read
 * @throws InvalidExtensionSchemaException if any file violates the extension schema contract
 */
static void parseAndValidateExtensionSchemas(String resolverPath, File inputDir) throws IOException, InvalidExtensionSchemaException {
// Parse each extension schema and validate it
Iterator<File> iterator = FileUtils.iterateFiles(inputDir, new String[] { PDL }, true);
DataSchemaResolver resolver = MultiFormatDataSchemaResolver.withBuiltinFormats(resolverPath);
while (iterator.hasNext()) {
File inputFile = iterator.next();
PdlSchemaParser parser = new PdlSchemaParser(resolver);
// try-with-resources: the original leaked one FileInputStream per schema file.
try (FileInputStream schemaStream = new FileInputStream(inputFile)) {
parser.parse(schemaStream);
}
if (parser.hasError()) {
throw new InvalidExtensionSchemaException(parser.errorMessage());
}
List<DataSchema> topLevelDataSchemas = parser.topLevelDataSchemas();
// Exactly one top-level schema is expected per extension file
// (size() != 1 covers both the empty and the multi-schema cases).
if (topLevelDataSchemas == null || topLevelDataSchemas.size() != 1) {
throw new InvalidExtensionSchemaException("Could not parse extension schema : " + inputFile.getAbsolutePath());
}
DataSchema topLevelDataSchema = topLevelDataSchemas.get(0);
if (!(topLevelDataSchema instanceof NamedDataSchema)) {
throw new InvalidExtensionSchemaException("Invalid extension schema : " + inputFile.getAbsolutePath() + ", the schema is not a named schema.");
}
// Single cast up front instead of repeating ((NamedDataSchema) ...) at every use.
NamedDataSchema namedSchema = (NamedDataSchema) topLevelDataSchema;
String schemaName = namedSchema.getName();
if (!schemaName.endsWith(EXTENSIONS_SUFFIX)) {
throw new InvalidExtensionSchemaException("Invalid extension schema name: '" + schemaName + "'. The name of the extension schema must be <baseSchemaName> + 'Extensions'");
}
// NOTE(review): the cast to RecordDataSchema assumes every named extension schema is
// a record; a non-record (e.g. enum) would throw ClassCastException here, as before.
RecordDataSchema recordSchema = (RecordDataSchema) topLevelDataSchema;
List<NamedDataSchema> includes = recordSchema.getInclude();
if (includes.size() != 1) {
throw new InvalidExtensionSchemaException("The extension schema: '" + schemaName + "' should include and only include the base schema");
}
NamedDataSchema includeSchema = includes.get(0);
if (!schemaName.startsWith(includeSchema.getName())) {
// Message fix: the original dropped the closing apostrophe after 'Extensions'.
throw new InvalidExtensionSchemaException("Invalid extension schema name: '" + schemaName + "'. The name of the extension schema must be baseSchemaName: '" + includeSchema.getName() + "' + 'Extensions'");
}
// Validate only the fields declared directly on the extension schema, not those
// inherited from the included base schema.
List<RecordDataSchema.Field> extensionSchemaFields = recordSchema.getFields().stream().filter(f -> !recordSchema.isFieldFromIncludes(f)).collect(Collectors.toList());
checkExtensionSchemaFields(extensionSchemaFields);
}
}
Usage example of com.linkedin.data.schema.DataSchemaResolver in the rest.li project (LinkedIn):
the parseSchema method of the PdlEncoderTest class.
/**
 * Parses a single .pdl schema file, resolving references against the test's pegasus
 * source directory.
 *
 * @param file schema file to parse
 * @return the parsed schema extracted from the parser's results
 * @throws IOException if the file cannot be read
 */
private DataSchema parseSchema(File file) throws IOException {
DataSchemaResolver resolver = MultiFormatDataSchemaResolver.withBuiltinFormats(pegasusSrcDir.getAbsolutePath());
AbstractSchemaParser parser = new PdlSchemaParser(resolver);
// try-with-resources: the original leaked the FileInputStream.
try (FileInputStream schemaStream = new FileInputStream(file)) {
parser.parse(schemaStream);
}
return extractSchema(parser, file.getAbsolutePath());
}
End of aggregated usage examples.