Use of com.linkedin.data.schema.resolver.MultiFormatDataSchemaResolver in project rest.li by LinkedIn.
Class SchemaFormatTranslator, method verifyTranslatedSchemas:
/**
 * Verifies that the translated schemas can be loaded back and are semantically equivalent to
 * their source schemas. The translated schemas are written to a temp directory, resolved through
 * a schema resolver whose path excludes the source models directory (so lookups hit the
 * translated files), and compared against the originals. On any mismatch, both versions are
 * dumped to an error directory for diffing and the process exits with status 1.
 *
 * @param topLevelTranslatedSchemas translated top-level schemas, keyed by schema full name
 * @throws IOException if temp directories or error-dump files cannot be created or written
 * @throws InterruptedException if writing the translated schemas is interrupted
 */
private void verifyTranslatedSchemas(Map<String, SchemaInfo> topLevelTranslatedSchemas) throws IOException, InterruptedException {
  File tempDir = new File(FileUtils.getTempDirectory(), "tmpPegasus" + _sourceDir.hashCode());
  File errorSchemasDir = new File(FileUtils.getTempDirectory(), "tmpPegasusErrors" + _sourceDir.hashCode());
  FileUtils.deleteDirectory(tempDir);
  FileUtils.deleteDirectory(errorSchemasDir);
  // Fail fast if the temp dir cannot be created. A bare 'assert' here would be a silent no-op
  // when the JVM runs without -ea, letting the verification proceed against a missing directory.
  if (!tempDir.mkdirs()) {
    throw new IOException("Unable to create temp directory: " + tempDir.getAbsolutePath());
  }
  // Write the schemas to temp directory for validation. Source files are not deleted/moved for this.
  writeTranslatedSchemasToDirectory(topLevelTranslatedSchemas, tempDir, false, null, false);
  // Build the resolver path: exclude the source models directory so lookups cannot fall back to
  // the originals, then append the directory holding the freshly generated models.
  StringTokenizer paths = new StringTokenizer(_resolverPath, File.pathSeparator);
  StringBuilder pathBuilder = new StringBuilder();
  while (paths.hasMoreTokens()) {
    String path = paths.nextToken();
    if (path.equals(_sourceDir.getPath()) || path.equals(_sourceDir.getAbsolutePath())) {
      // Skip the source models directory.
      continue;
    }
    pathBuilder.append(path);
    pathBuilder.append(File.pathSeparatorChar);
  }
  // Include the directory with the generated models in the resolver path.
  pathBuilder.append(tempDir.getPath());
  // Now try loading the schemas from the temp directory and compare with the source schema.
  String path = pathBuilder.toString();
  LOGGER.debug("Creating resolver with path :{}", path);
  MultiFormatDataSchemaResolver resolver = MultiFormatDataSchemaResolver.withBuiltinFormats(path);
  boolean hasError = false;
  List<SchemaInfo> failedSchemas = new ArrayList<>();
  for (SchemaInfo schemaInfo : topLevelTranslatedSchemas.values()) {
    NamedDataSchema sourceSchema = schemaInfo.getSourceSchema();
    String schemaName = sourceSchema.getFullName();
    DataSchema destSchema = resolver.findDataSchema(schemaName, new StringBuilder());
    if (destSchema == null) {
      LOGGER.error("Unable to load translated schema: {}", schemaName);
      failedSchemas.add(schemaInfo);
      hasError = true;
    } else {
      LOGGER.debug("Loaded translated schema: {}, from location: {}", schemaName,
          resolver.nameToDataSchemaLocations().get(schemaName).getSourceFile().getAbsolutePath());
      // Verify that the source schema and the translated schema are semantically equivalent.
      if (!sourceSchema.equals(destSchema)) {
        LOGGER.error("Translation failed for schema: {}", schemaName);
        // Dump both versions to the error directory so the mismatch can be diffed by hand.
        File sourceFile = new File(errorSchemasDir, sourceSchema.getName() + "_" + _sourceFormat);
        FileUtils.writeStringToFile(sourceFile, SchemaToJsonEncoder.schemaToJson(sourceSchema, JsonBuilder.Pretty.INDENTED));
        File destFile = new File(errorSchemasDir, sourceSchema.getName() + "_" + _destFormat);
        FileUtils.writeStringToFile(destFile, SchemaToJsonEncoder.schemaToJson(destSchema, JsonBuilder.Pretty.INDENTED));
        // Fixed typo in the original message: "tanslated" -> "translated".
        LOGGER.error("To see the difference between source and translated schemas, run: \ndiff {} {}",
            sourceFile.getAbsolutePath(), destFile.getAbsolutePath());
        failedSchemas.add(schemaInfo);
        hasError = true;
      }
    }
  }
  FileUtils.deleteDirectory(tempDir);
  if (hasError) {
    LOGGER.error("Found translation errors, aborting translation. Failed schemas:");
    for (SchemaInfo schemaInfo : failedSchemas) {
      LOGGER.error(schemaInfo.getSourceFile().getAbsolutePath());
    }
    System.exit(1);
  }
}
Use of com.linkedin.data.schema.resolver.MultiFormatDataSchemaResolver in project rest.li by LinkedIn.
Class TestSchemaFormatTranslator, method testTranslatePdscFromConvertedPdlInSchema:
/**
 * Round-trip test: translates pdsc sources to pdl (keeping the sources via '-o'), then translates
 * the generated pdl back to pdsc (without '-o', deleting the intermediates), checking file counts
 * at each leg and finally asserting the round-tripped schema matches the original.
 */
@Test(dataProvider = "fullClassName")
public void testTranslatePdscFromConvertedPdlInSchema(String packageName, String className) throws Exception {
  FileUtil.FileExtensionFilter pdscExtensionFilter = new FileUtil.FileExtensionFilter(SchemaParser.FILE_EXTENSION);
  FileUtil.FileExtensionFilter pdlExtensionFilter = new FileUtil.FileExtensionFilter(PdlSchemaParser.FILE_EXTENSION);
  // First leg: pdsc -> pdl with '-o', so the pdsc originals stay in the source root.
  String pdlOutputDir = Files.createTempDirectory("restli").toFile().getAbsolutePath();
  SchemaFormatTranslator.main(new String[] { "-o", RESOLVER_DIR, SOURCE_ROOT, pdlOutputDir });
  // '-o' keeps the source files in place.
  List<File> originalPdscFiles = FileUtil.listFiles(new File(SOURCE_ROOT), pdscExtensionFilter);
  Assert.assertTrue(originalPdscFiles.size() > 0);
  List<File> generatedPdlFiles = FileUtil.listFiles(new File(pdlOutputDir), pdlExtensionFilter);
  Assert.assertTrue(generatedPdlFiles.size() > 0);
  // Every pdsc source must have produced a pdl file.
  Assert.assertEquals(generatedPdlFiles.size(), originalPdscFiles.size());
  // Second leg: pdl -> pdsc without '-o', so the intermediate pdl files get deleted.
  int intermediatePdlCount = generatedPdlFiles.size();
  String pdscOutputDir = Files.createTempDirectory("restli").toFile().getAbsolutePath();
  String pdlResolverPath = EXTERNAL_RESOURCES + File.pathSeparator + pdlOutputDir;
  SchemaFormatTranslator.main(new String[] { "-spdl", "-dpdsc", pdlResolverPath, pdlOutputDir, pdscOutputDir });
  List<File> roundTrippedPdscFiles = FileUtil.listFiles(new File(pdscOutputDir), pdscExtensionFilter);
  Assert.assertTrue(roundTrippedPdscFiles.size() > 0);
  Assert.assertEquals(roundTrippedPdscFiles.size(), intermediatePdlCount);
  // Without '-o', the pdl inputs are removed after translation.
  Assert.assertTrue(FileUtil.listFiles(new File(pdlOutputDir), pdlExtensionFilter).isEmpty());
  // The round-tripped schema must be semantically identical to the original.
  MultiFormatDataSchemaResolver sourceResolver = MultiFormatDataSchemaResolver.withBuiltinFormats(RESOLVER_DIR);
  MultiFormatDataSchemaResolver translatedResolver =
      MultiFormatDataSchemaResolver.withBuiltinFormats(pdscOutputDir + File.pathSeparator + EXTERNAL_RESOURCES);
  assertSameSchemas(packageName + "." + className, sourceResolver, translatedResolver);
}
Use of com.linkedin.data.schema.resolver.MultiFormatDataSchemaResolver in project rest.li by LinkedIn.
Class TestSchemaFormatTranslator, method testTranslatorWorksWithArgFile:
/**
 * Verifies the translator accepts its resolver path through an '@argFile' indirection:
 * the resolver path is written to a file and passed as "@&lt;file&gt;" instead of a literal path.
 */
@Test(dataProvider = "fullClassName")
public void testTranslatorWorksWithArgFile(String packageName, String className) throws Exception {
  File outputDir = Files.createTempDirectory("restli").toFile();
  // Write the resolver path into a file; the translator must expand the '@' reference.
  File resolverPathArgFile = new File(outputDir, "resolverPath");
  Files.write(resolverPathArgFile.toPath(), Collections.singletonList(RESOLVER_DIR));
  SchemaFormatTranslator.main(
      new String[] { "-o", String.format("@%s", resolverPathArgFile.toPath()), SOURCE_ROOT, outputDir.getAbsolutePath() });
  // The translated schemas must match the originals.
  MultiFormatDataSchemaResolver sourceResolver = MultiFormatDataSchemaResolver.withBuiltinFormats(RESOLVER_DIR);
  MultiFormatDataSchemaResolver translatedResolver =
      MultiFormatDataSchemaResolver.withBuiltinFormats(outputDir.getAbsolutePath() + File.pathSeparator + EXTERNAL_RESOURCES);
  assertSameSchemas(packageName + "." + className, sourceResolver, translatedResolver);
}
Use of com.linkedin.data.schema.resolver.MultiFormatDataSchemaResolver in project rest.li by LinkedIn.
Class TestSchemaFormatTranslator, method testTranslatePdscToPdl:
/**
 * Basic pdsc -> pdl translation: the schema resolved from the translated output must be
 * semantically identical to the one resolved from the original sources.
 */
@Test(dataProvider = "fullClassName")
public void testTranslatePdscToPdl(String packageName, String className) throws Exception {
  String outputDir = Files.createTempDirectory("restli").toFile().getAbsolutePath();
  // '-o' keeps the pdsc sources in place while pdl output goes to outputDir.
  SchemaFormatTranslator.main(new String[] { "-o", RESOLVER_DIR, SOURCE_ROOT, outputDir });
  MultiFormatDataSchemaResolver sourceResolver = MultiFormatDataSchemaResolver.withBuiltinFormats(RESOLVER_DIR);
  MultiFormatDataSchemaResolver translatedResolver =
      MultiFormatDataSchemaResolver.withBuiltinFormats(outputDir + File.pathSeparator + EXTERNAL_RESOURCES);
  assertSameSchemas(packageName + "." + className, sourceResolver, translatedResolver);
}
Use of com.linkedin.data.schema.resolver.MultiFormatDataSchemaResolver in project rest.li by LinkedIn.
Class TestResponseUtils, method testGetAbsentFieldsDefaultValues:
/**
 * Verifies that ResponseUtils.fillInDataDefault fills fields absent from the input with their
 * schema-declared default values. Each JSON case file supplies the schema reference ("schema"),
 * the input data ("input"), the expected data-with-defaults ("expect"), and a "context" string
 * used as the assertion message.
 *
 * @param caseFilename name of the JSON case file under the pegasus directory
 * @throws Exception if the case file or schema cannot be read or parsed. Letting exceptions
 *         propagate preserves the full stack trace in the test report, instead of the original
 *         catch-all which flattened it into an Assert.fail message (losing the trace).
 */
@Test(dataProvider = "default_serialization")
public void testGetAbsentFieldsDefaultValues(String caseFilename) throws Exception {
  MultiFormatDataSchemaResolver schemaResolver = MultiFormatDataSchemaResolver.withBuiltinFormats(resolverDir);
  // The case file is a JSON DataMap describing the whole test case.
  String expectedDataJsonFile = Files.readFile(new File(pegasusDir + FS + caseFilename));
  DataMap caseData = DataMapUtils.readMap(new ByteArrayInputStream(expectedDataJsonFile.getBytes()), Collections.emptyMap());
  String schemaFileText = Files.readFile(new File(pegasusDir + FS + caseData.get("schema")));
  DataMap caseInput = (DataMap) caseData.get("input");
  DataMap caseExpect = (DataMap) caseData.get("expect");
  DataSchema schema = DataTemplateUtil.parseSchema(schemaFileText, schemaResolver, SchemaFormatType.PDL);
  DataMap dataWithDefault = (DataMap) ResponseUtils.fillInDataDefault(schema, caseInput);
  // Echo both sides to stdout to make diagnosing mismatches easier.
  System.out.println("Expect " + caseExpect);
  System.out.println("Actual " + dataWithDefault);
  Assert.assertEquals(dataWithDefault, caseExpect, (String) caseData.get("context"));
}
Aggregations