Use of io.cdap.cdap.api.plugin.PluginProperties in project cdap by caskdata.
The class PluggableFilterTransform, method configurePipeline.
@Override
public void configurePipeline(PipelineConfigurer pipelineConfigurer) throws IllegalArgumentException {
  Map<String, String> filterProperties = GSON.fromJson(conf.filterProperties, MAP_TYPE);
  PluginProperties pluginProperties = PluginProperties.builder().addAll(filterProperties).build();
  filter = pipelineConfigurer.usePlugin(FILTER_PLUGIN_TYPE, conf.filterPlugin, "id", pluginProperties);
  if (filter == null) {
    throw new IllegalArgumentException("Could not find filter plugin " + conf.filterPlugin);
  }
}
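In the snippet above, conf.filterProperties is a JSON object serialized as a string and MAP_TYPE is a Gson type token for Map<String, String>; neither is shown, so the following is a minimal, self-contained sketch of the same parse-and-build step. The token definition and the sample JSON value are assumptions for illustration, not taken from PluggableFilterTransform.

import java.lang.reflect.Type;
import java.util.Map;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

import io.cdap.cdap.api.plugin.PluginProperties;

public class FilterPropertiesSketch {
  // Assumed equivalent of the MAP_TYPE constant referenced above.
  private static final Type MAP_TYPE = new TypeToken<Map<String, String>>() { }.getType();
  private static final Gson GSON = new Gson();

  public static void main(String[] args) {
    // Hypothetical value of conf.filterProperties.
    String filterPropertiesJson = "{\"field\":\"age\",\"operation\":\"gt\",\"value\":\"21\"}";
    Map<String, String> filterProperties = GSON.fromJson(filterPropertiesJson, MAP_TYPE);
    // Build immutable PluginProperties from the parsed map.
    PluginProperties pluginProperties = PluginProperties.builder().addAll(filterProperties).build();
    System.out.println(pluginProperties.getProperties());
  }
}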
Use of io.cdap.cdap.api.plugin.PluginProperties in project cdap by caskdata.
The class ExternalSparkProgram, method configure.
@Override
protected void configure() {
  setClientResources(phaseSpec.getClientResources());
  setDriverResources(phaseSpec.getDriverResources());
  setExecutorResources(phaseSpec.getResources());
  // register the plugins at program level so that the program can be failed by the platform early in case of
  // plugin requirements not being met
  phaseSpec.getPhase().registerPlugins(getConfigurer(), runtimeConfigurer, deployedNamespace);
  PluginSpec pluginSpec = stageSpec.getPlugin();
  PluginProperties pluginProperties = PluginProperties.builder().addAll(pluginSpec.getProperties()).build();
  // use a UUID as the plugin ID so that it doesn't clash with anything. Only using the class here to
  // check which main class is needed
  // TODO: clean this up so that we only get the class once and store it in the PluginSpec instead of getting
  // it in the pipeline spec generator and here
  Object sparkPlugin = usePlugin(pluginSpec.getType(), pluginSpec.getName(), UUID.randomUUID().toString(),
                                 pluginProperties);
  if (sparkPlugin == null) {
    // should never happen, should have been checked before by the pipeline spec generator
    throw new IllegalStateException(String.format("No plugin found of type %s and name %s for stage %s",
                                                  pluginSpec.getType(), pluginSpec.getName(), STAGE_NAME));
  }
  if (Spark.class.isAssignableFrom(sparkPlugin.getClass())) {
    // TODO: Pass in a forwarding configurer so that we can capture the properties set by the plugin
    // However, the usage is very limited as the plugin can always use plugin config to preserve properties
    ((Spark) sparkPlugin).configure(getConfigurer());
  } else if (SparkMain.class.isAssignableFrom(sparkPlugin.getClass())) {
    setMainClass(ScalaSparkMainWrapper.class);
  } else {
    setMainClass(JavaSparkMainWrapper.class);
  }
  setName(phaseSpec.getPhaseName());
  Map<String, String> properties = new HashMap<>();
  properties.put(STAGE_NAME, stageSpec.getName());
  properties.put(Constants.PIPELINEID, GSON.toJson(phaseSpec, BatchPhaseSpec.class));
  setProperties(properties);
}
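The comment above notes that the UUID-keyed usePlugin call exists only to inspect the plugin class; the per-stage registration that matters at runtime happens in registerPlugins. In the general CDAP API, the ID passed to usePlugin at configure time is the handle used to instantiate the plugin at runtime through PluginContext. Below is a minimal sketch of that register-then-instantiate pattern, assuming the standard AbstractSpark and PluginContext APIs; the plugin type "sparkprogram", plugin name "MyPlugin", the ID, and the property key/value are placeholders, not values from ExternalSparkProgram.

import io.cdap.cdap.api.plugin.PluginProperties;
import io.cdap.cdap.api.spark.AbstractSpark;
import io.cdap.cdap.api.spark.JavaSparkExecutionContext;
import io.cdap.cdap.api.spark.JavaSparkMain;

/**
 * Sketch only: registers a plugin under an explicit ID at configure time and re-creates it by the
 * same ID at runtime. Plugin type, name, ID, and properties below are placeholders.
 */
public class PluginHostingSpark extends AbstractSpark implements JavaSparkMain {
  private static final String PLUGIN_ID = "my-plugin-id";

  @Override
  protected void configure() {
    setMainClass(PluginHostingSpark.class);
    PluginProperties properties = PluginProperties.builder().add("path", "/tmp/out").build();
    // usePlugin returns null if no matching plugin artifact is found, so deployment can fail early.
    if (usePlugin("sparkprogram", "MyPlugin", PLUGIN_ID, properties) == null) {
      throw new IllegalArgumentException("Plugin MyPlugin of type sparkprogram not found");
    }
  }

  @Override
  public void run(JavaSparkExecutionContext sec) throws Exception {
    // At runtime, the same ID re-creates the plugin registered above.
    Object plugin = sec.getPluginContext().newPluginInstance(PLUGIN_ID);
    // ... use the plugin ...
  }
}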
Use of io.cdap.cdap.api.plugin.PluginProperties in project cdap by caskdata.
The class JavaSparkMainWrapper, method getProgramArgs.
@Nullable
private String getProgramArgs(JavaSparkExecutionContext sec, String stageName) {
  // get program args from plugin properties
  PluginProperties pluginProperties = sec.getPluginContext().getPluginProperties(stageName);
  String programArgs = pluginProperties == null
    ? null : pluginProperties.getProperties().get(ExternalSparkProgram.PROGRAM_ARGS);
  // can be overridden by runtime args
  String programArgsKey = stageName + "." + ExternalSparkProgram.PROGRAM_ARGS;
  if (sec.getRuntimeArguments().containsKey(programArgsKey)) {
    programArgs = sec.getRuntimeArguments().get(programArgsKey);
  }
  return programArgs;
}
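The lookup order above is: the stage's plugin property first, then a runtime argument keyed by "<stageName>.program.args" overrides it. The following is a minimal sketch of that resolution using plain maps; the stage name "wordCount" and the literal key "program.args" (standing in for ExternalSparkProgram.PROGRAM_ARGS) are assumptions for illustration.

import java.util.HashMap;
import java.util.Map;

public class ProgramArgsLookupSketch {
  public static void main(String[] args) {
    Map<String, String> pluginProps = new HashMap<>();
    pluginProps.put("program.args", "--input /default/path");

    Map<String, String> runtimeArgs = new HashMap<>();
    runtimeArgs.put("wordCount.program.args", "--input /override/path");

    // default comes from the plugin properties registered at configure time
    String programArgs = pluginProps.get("program.args");
    // a runtime argument keyed by "<stageName>.program.args" takes precedence
    String programArgsKey = "wordCount" + "." + "program.args";
    if (runtimeArgs.containsKey(programArgsKey)) {
      programArgs = runtimeArgs.get(programArgsKey);
    }
    System.out.println(programArgs); // prints the overridden value
  }
}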
Use of io.cdap.cdap.api.plugin.PluginProperties in project cdap by caskdata.
The class ArtifactRepositoryTest, method testPluginProperties.
@Test
public void testPluginProperties() {
  PluginProperties pluginProperties = PluginProperties.builder()
    .add("class.name", TEST_EMPTY_CLASS)
    .add("timeout", "10")
    .add("name", "${macro}")
    .build();
  Assert.assertTrue(pluginProperties.getMacros().getLookups().isEmpty());
  Set<String> lookups = new HashSet<>();
  lookups.add("macro");
  PluginProperties updatedPluginProperties = pluginProperties.setMacros(new Macros(lookups, new HashSet<>()));
  // setMacros returns a new instance; the original is unchanged
  Assert.assertTrue(pluginProperties.getMacros().getLookups().isEmpty());
  Assert.assertEquals(lookups, updatedPluginProperties.getMacros().getLookups());
  Assert.assertTrue(updatedPluginProperties.getMacros().getMacroFunctions().isEmpty());
}
Use of io.cdap.cdap.api.plugin.PluginProperties in project cdap by caskdata.
The class PluginInstantiator, method newInstance.
/**
 * Creates a new instance of the given plugin class with all property macros substituted if a MacroEvaluator
 * is given. At runtime, plugin property fields that are macro-enabled and contain macro syntax will remain
 * in the macroFields set in the plugin config.
 *
 * @param plugin {@link Plugin}
 * @param macroEvaluator the MacroEvaluator that performs macro substitution
 * @param options macro parser options
 * @param <T> type of the plugin
 * @return a new plugin instance with macros substituted
 * @throws IOException if failed to expand the plugin jar to create the plugin ClassLoader
 * @throws ClassNotFoundException if failed to load the given plugin class
 * @throws InvalidPluginConfigException if the PluginConfig could not be created from the plugin properties
 */
public <T> T newInstance(Plugin plugin, @Nullable MacroEvaluator macroEvaluator,
                         @Nullable MacroParserOptions options)
  throws IOException, ClassNotFoundException, InvalidMacroException {
  ClassLoader classLoader = getPluginClassLoader(plugin);
  PluginClass pluginClass = plugin.getPluginClass();
  @SuppressWarnings("unchecked")
  TypeToken<T> pluginType = TypeToken.of((Class<T>) classLoader.loadClass(pluginClass.getClassName()));
  try {
    String configFieldName = pluginClass.getConfigFieldName();
    // Plugin doesn't have config. Simply return a new instance.
    if (configFieldName == null) {
      return instantiatorFactory.get(pluginType).create();
    }
    // Create the config instance
    Field field = Fields.findField(pluginType.getType(), configFieldName);
    TypeToken<?> configFieldType = pluginType.resolveType(field.getGenericType());
    Object config = instantiatorFactory.get(configFieldType).create();
    // perform macro substitution if an evaluator is provided; collect fields with macros only at configure time
    PluginProperties pluginProperties = substituteMacros(plugin, macroEvaluator, options);
    Set<String> macroFields = (macroEvaluator == null) ? getFieldsWithMacro(plugin) : Collections.emptySet();
    PluginProperties rawProperties = plugin.getProperties();
    ConfigFieldSetter fieldSetter = new ConfigFieldSetter(pluginClass, pluginProperties, rawProperties, macroFields);
    Reflections.visit(config, configFieldType.getType(), fieldSetter);
    if (!fieldSetter.invalidProperties.isEmpty() || !fieldSetter.missingProperties.isEmpty()) {
      throw new InvalidPluginConfigException(pluginClass, fieldSetter.missingProperties, fieldSetter.invalidProperties);
    }
    // Create the plugin instance
    return newInstance(pluginType, field, configFieldType, config);
  } catch (NoSuchFieldException e) {
    throw new InvalidPluginConfigException("Config field not found in plugin class: " + pluginClass, e);
  } catch (IllegalAccessException e) {
    throw new InvalidPluginConfigException("Failed to set plugin config field: " + pluginClass, e);
  }
}
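For context on what a "macro-enabled" field and the macroFields set refer to, here is a sketch of a plugin config class, assuming the standard PluginConfig base class and the @Name/@Description/@Macro annotations from io.cdap.cdap.api.annotation; the field names and macro values are illustrative, not taken from a real plugin.

import io.cdap.cdap.api.annotation.Description;
import io.cdap.cdap.api.annotation.Macro;
import io.cdap.cdap.api.annotation.Name;
import io.cdap.cdap.api.plugin.PluginConfig;

import javax.annotation.Nullable;

/**
 * Sketch of a plugin config with a macro-enabled field. At configure time a value such as
 * "${runtime.threshold}" is left unevaluated and the field is tracked as a macro field;
 * at runtime, newInstance substitutes it using the supplied MacroEvaluator.
 */
public class ExampleFilterConfig extends PluginConfig {

  @Name("field")
  @Description("Name of the record field to filter on.")
  private String field;

  @Name("threshold")
  @Description("Filter threshold; supports macros such as ${runtime.threshold}.")
  @Macro
  private String threshold;

  @Name("timeout")
  @Description("Optional timeout in seconds.")
  @Nullable
  private Integer timeout;

  // getters omitted in this sketch; fields are populated reflectively by the platform
}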