Use of co.cask.cdap.internal.io.ReflectionSchemaGenerator in project cdap by caskdata.
From the class SchemaTest, method testPrimitiveArray:
@Test
public void testPrimitiveArray() throws UnsupportedTypeException {
  Schema schema = new ReflectionSchemaGenerator().generate(int[].class);
  Assert.assertEquals(Schema.arrayOf(Schema.of(Schema.Type.INT)), schema);
}
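For comparison, a minimal standalone sketch (not from the cdap sources) of pointing the same generator at a small POJO instead of a primitive array; the Point class is hypothetical and exists only for this illustration, and the printed record schema depends on the generator's defaults:

import co.cask.cdap.api.data.schema.Schema;
import co.cask.cdap.internal.io.ReflectionSchemaGenerator;

public class PojoSchemaExample {

  // Hypothetical POJO used only for this illustration.
  static final class Point {
    int x;
    int y;
  }

  public static void main(String[] args) throws Exception {
    // Reflect the POJO into a record schema, the same way the test above
    // reflects int[] into an array schema.
    Schema schema = new ReflectionSchemaGenerator().generate(Point.class);
    System.out.println(schema);
  }
}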
Use of co.cask.cdap.internal.io.ReflectionSchemaGenerator in project cdap by caskdata.
From the class SchemaTest, method testSameRecordDifferentLevels:
@Test
public void testSameRecordDifferentLevels() throws UnsupportedTypeException, IOException {
  Schema actual = new ReflectionSchemaGenerator().generate(Node6.class);
  Assert.assertEquals(Node6.SCHEMA, actual);
  // Check serialization and deserialization.
  Assert.assertEquals(Node6.SCHEMA, Schema.parseJson(actual.toString()));
}
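The second assertion above round-trips the generated schema through its JSON form. A minimal sketch (not from the cdap sources) of the same round trip on a hand-built schema, assuming the Schema factory methods recordOf, Field.of, arrayOf and of, with field names chosen only for illustration:

import co.cask.cdap.api.data.schema.Schema;

public class SchemaRoundTripExample {
  public static void main(String[] args) throws Exception {
    // Hand-built record schema standing in for a generated one such as Node6.SCHEMA.
    Schema schema = Schema.recordOf(
      "node",
      Schema.Field.of("name", Schema.of(Schema.Type.STRING)),
      Schema.Field.of("values", Schema.arrayOf(Schema.of(Schema.Type.INT))));
    // toString() emits the JSON form; parseJson reads it back.
    Schema parsed = Schema.parseJson(schema.toString());
    System.out.println(schema.equals(parsed)); // expected: true
  }
}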
Use of co.cask.cdap.internal.io.ReflectionSchemaGenerator in project cdap by caskdata.
From the class InMemoryConfigurator, method getSpecJson:
private <T extends Config> String getSpecJson(Application<T> app) throws Exception {
  // This Gson cannot be static, since it is used to deserialize user classes.
  // Gson keeps a static map of classes, so a static instance would leak the classloader.
  Gson gson = new GsonBuilder().registerTypeAdapterFactory(new CaseInsensitiveEnumTypeAdapterFactory()).create();
  // Now call configure, which produces the application specification.
  DefaultAppConfigurer configurer;
  File tempDir = DirUtils.createTempDir(baseUnpackDir);
  try (PluginInstantiator pluginInstantiator = new PluginInstantiator(cConf, app.getClass().getClassLoader(), tempDir)) {
    configurer = new DefaultAppConfigurer(appNamespace, artifactId, app, configString, artifactRepository, pluginInstantiator);
    T appConfig;
    Type configType = Artifacts.getConfigType(app.getClass());
    if (configString.isEmpty()) {
      //noinspection unchecked
      appConfig = ((Class<T>) configType).newInstance();
    } else {
      try {
        appConfig = gson.fromJson(configString, configType);
      } catch (JsonSyntaxException e) {
        throw new IllegalArgumentException("Invalid JSON configuration was provided. Please check the syntax.", e);
      }
    }
    try {
      ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(
        new CombineClassLoader(null, app.getClass().getClassLoader(), getClass().getClassLoader()));
      try {
        app.configure(configurer, new DefaultApplicationContext<>(appConfig));
      } finally {
        ClassLoaders.setContextClassLoader(oldClassLoader);
      }
    } catch (Throwable t) {
      Throwable rootCause = Throwables.getRootCause(t);
      if (rootCause instanceof ClassNotFoundException) {
        // Heuristic to provide a better error message.
        String missingClass = rootCause.getMessage();
        // If the missing class has "spark" in the name, check whether Spark is available.
        if (missingClass.startsWith("org.apache.spark.") || missingClass.startsWith("co.cask.cdap.api.spark.")) {
          // Try to load the SparkContext class, which should be available if Spark is available in the platform.
          try {
            artifactClassLoader.loadClass("org.apache.spark.SparkContext");
          } catch (ClassNotFoundException e) {
            // Spark is not available; the failure is most likely caused by Spark missing from the platform.
            throw new IllegalStateException("Missing Spark related class " + missingClass
              + ". It may be caused by unavailability of Spark. "
              + "Please verify environment variable " + Constants.SPARK_HOME + " is set correctly", t);
          }
          // Spark is available, so the failure can be caused by an incompatible Spark version.
          throw new InvalidArtifactException("Missing Spark related class " + missingClass
            + ". Configured to use Spark located at " + System.getenv(Constants.SPARK_HOME)
            + ", which may be incompatible with the one required by the application", t);
        }
        // Otherwise the missing class is most likely due to a missing library in the artifact jar.
        throw new InvalidArtifactException("Missing class " + missingClass
          + ". It may be caused by missing dependency jar(s) in the artifact jar.", t);
      }
      throw t;
    }
  } finally {
    try {
      DirUtils.deleteDirectoryContents(tempDir);
    } catch (IOException e) {
      LOG.warn("Exception raised when deleting directory {}", tempDir, e);
    }
  }
  ApplicationSpecification specification = configurer.createSpecification(applicationName, applicationVersion);
  // TODO: The SchemaGenerator should be injected
  return ApplicationSpecificationAdapter.create(new ReflectionSchemaGenerator()).toJson(specification);
}
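The adapter built on the last line is symmetric with the fromJson call used in ConfiguratorTest further down this page. A minimal helper sketch (not from the cdap sources, imports omitted as in the snippets above; the helper name is illustrative) that round-trips a specification through JSON:

// Sketch only: 'spec' stands for an ApplicationSpecification obtained elsewhere,
// e.g. from DefaultAppConfigurer.createSpecification(...) as in the method above.
private static ApplicationSpecification roundTripSpec(ApplicationSpecification spec) throws Exception {
  ApplicationSpecificationAdapter adapter = ApplicationSpecificationAdapter.create(new ReflectionSchemaGenerator());
  String json = adapter.toJson(spec);         // what getSpecJson returns
  return adapter.fromJson(json);              // what ConfiguratorTest does with the config response
}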
Use of co.cask.cdap.internal.io.ReflectionSchemaGenerator in project cdap by caskdata.
From the class FlowUtils, method getAllConsumerGroups:
/**
 * Gets all consumer group configurations for the given queue.
 */
private static Set<ConsumerGroupConfig> getAllConsumerGroups(Program program, FlowSpecification flowSpec,
                                                             QueueName queueName,
                                                             Table<QueueSpecificationGenerator.Node, String, Set<QueueSpecification>> queueSpecs) {
  Set<ConsumerGroupConfig> groupConfigs = Sets.newHashSet();
  SchemaGenerator schemaGenerator = new ReflectionSchemaGenerator();
  // Get all the consumers of this queue.
  for (Map.Entry<String, FlowletDefinition> entry : flowSpec.getFlowlets().entrySet()) {
    String flowletId = entry.getKey();
    for (QueueSpecification queueSpec : Iterables.concat(queueSpecs.column(flowletId).values())) {
      if (!queueSpec.getQueueName().equals(queueName)) {
        continue;
      }
      try {
        // Inspect the flowlet consumer.
        FlowletDefinition flowletDefinition = entry.getValue();
        Class<?> flowletClass = program.getClassLoader().loadClass(flowletDefinition.getFlowletSpec().getClassName());
        long groupId = generateConsumerGroupId(program.getId(), flowletId);
        addConsumerGroup(queueSpec, flowletClass, groupId, flowletDefinition.getInstances(), schemaGenerator, groupConfigs);
      } catch (ClassNotFoundException e) {
        // Loading a Flowlet class should never fail here, since it is verified during deployment.
        throw Throwables.propagate(e);
      }
    }
  }
  return groupConfigs;
}
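The inner loop above flattens one column of a Guava Table to find every QueueSpecification feeding a given flowlet. A standalone sketch of that lookup pattern (not from the cdap sources, with String and Integer standing in for the CDAP row and value types):

import java.util.Set;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Table;

public class TableColumnLookupExample {
  public static void main(String[] args) {
    // Rows and values are simplified stand-ins for QueueSpecificationGenerator.Node
    // and Set<QueueSpecification> in the method above.
    Table<String, String, Set<Integer>> specs = HashBasedTable.create();
    specs.put("sourceA", "flowletX", ImmutableSet.of(1, 2));
    specs.put("sourceB", "flowletX", ImmutableSet.of(3));
    // Iterables.concat flattens the per-row sets of one column (one flowlet).
    for (int spec : Iterables.concat(specs.column("flowletX").values())) {
      System.out.println(spec);
    }
  }
}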
Use of co.cask.cdap.internal.io.ReflectionSchemaGenerator in project cdap by caskdata.
From the class ConfiguratorTest, method testAppWithConfig:
@Test
public void testAppWithConfig() throws Exception {
  LocationFactory locationFactory = new LocalLocationFactory(TMP_FOLDER.newFolder());
  Location appJar = AppJarHelper.createDeploymentJar(locationFactory, ConfigTestApp.class);
  Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, ConfigTestApp.class.getSimpleName(), "1.0.0");
  CConfiguration cConf = CConfiguration.create();
  ArtifactRepository baseArtifactRepo = new DefaultArtifactRepository(conf, null, null, new DummyProgramRunnerFactory(),
                                                                      new DefaultImpersonator(cConf, null));
  ArtifactRepository artifactRepo = new AuthorizationArtifactRepository(baseArtifactRepo, authEnforcer, authenticationContext);
  ConfigTestApp.ConfigClass config = new ConfigTestApp.ConfigClass("myStream", "myTable");
  // Create a configurator that is testable. Provide it an application.
  try (CloseableClassLoader artifactClassLoader = artifactRepo.createArtifactClassLoader(
    appJar, new EntityImpersonator(artifactId.getNamespace().toEntityId(), new DefaultImpersonator(cConf, null)))) {
    Configurator configuratorWithConfig = new InMemoryConfigurator(conf, Id.Namespace.DEFAULT, artifactId,
                                                                   ConfigTestApp.class.getName(), artifactRepo,
                                                                   artifactClassLoader, null, null,
                                                                   new Gson().toJson(config));
    ListenableFuture<ConfigResponse> result = configuratorWithConfig.config();
    ConfigResponse response = result.get(10, TimeUnit.SECONDS);
    Assert.assertNotNull(response);
    ApplicationSpecificationAdapter adapter = ApplicationSpecificationAdapter.create(new ReflectionSchemaGenerator());
    ApplicationSpecification specification = adapter.fromJson(response.get());
    Assert.assertNotNull(specification);
    Assert.assertTrue(specification.getStreams().size() == 1);
    Assert.assertTrue(specification.getStreams().containsKey("myStream"));
    Assert.assertTrue(specification.getDatasets().size() == 1);
    Assert.assertTrue(specification.getDatasets().containsKey("myTable"));
    Configurator configuratorWithoutConfig = new InMemoryConfigurator(conf, Id.Namespace.DEFAULT, artifactId,
                                                                      ConfigTestApp.class.getName(), artifactRepo,
                                                                      artifactClassLoader, null, null, null);
    result = configuratorWithoutConfig.config();
    response = result.get(10, TimeUnit.SECONDS);
    Assert.assertNotNull(response);
    specification = adapter.fromJson(response.get());
    Assert.assertNotNull(specification);
    Assert.assertTrue(specification.getStreams().size() == 1);
    Assert.assertTrue(specification.getStreams().containsKey(ConfigTestApp.DEFAULT_STREAM));
    Assert.assertTrue(specification.getDatasets().size() == 1);
    Assert.assertTrue(specification.getDatasets().containsKey(ConfigTestApp.DEFAULT_TABLE));
    Assert.assertNotNull(specification.getProgramSchedules().get(ConfigTestApp.SCHEDULE_NAME));
    ProgramStatusTrigger trigger =
      (ProgramStatusTrigger) specification.getProgramSchedules().get(ConfigTestApp.SCHEDULE_NAME).getTrigger();
    Assert.assertEquals(trigger.getProgramId().getProgram(), ConfigTestApp.WORKFLOW_NAME);
  }
}
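The test hands the configurator its configuration as a JSON string (new Gson().toJson(config)), and InMemoryConfigurator.getSpecJson above parses that string back with gson.fromJson. A minimal sketch of that round trip with a hypothetical config class standing in for ConfigTestApp.ConfigClass:

import com.google.gson.Gson;

public class ConfigJsonExample {

  // Hypothetical stand-in for ConfigTestApp.ConfigClass.
  static final class MyConfig {
    String streamName;
    String tableName;

    MyConfig(String streamName, String tableName) {
      this.streamName = streamName;
      this.tableName = tableName;
    }
  }

  public static void main(String[] args) {
    Gson gson = new Gson();
    String json = gson.toJson(new MyConfig("myStream", "myTable")); // what the test passes in
    MyConfig parsed = gson.fromJson(json, MyConfig.class);          // what the configurator does with it
    System.out.println(parsed.streamName + " / " + parsed.tableName);
  }
}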