use of co.cask.cdap.api.plugin.PluginProperties in project cdap by caskdata.
the class ExternalSparkProgram, method configure():
@Override
protected void configure() {
  PluginSpec pluginSpec = stageSpec.getPlugin();
  PluginProperties pluginProperties = PluginProperties.builder()
    .addAll(pluginSpec.getProperties())
    .build();
  // use a UUID as plugin ID so that it doesn't clash with anything. Only using the class here to
  // check which main class is needed
  // TODO: clean this up so that we only get the class once and store it in the PluginSpec instead of getting
  // it in the pipeline spec generator and here
  Object sparkPlugin = usePlugin(pluginSpec.getType(), pluginSpec.getName(),
                                 UUID.randomUUID().toString(), pluginProperties);
  if (sparkPlugin == null) {
    // should never happen; this is validated earlier by the pipeline spec generator
    throw new IllegalStateException(String.format("No plugin found of type %s and name %s for stage %s",
                                                  pluginSpec.getType(), pluginSpec.getName(), stageSpec.getName()));
  }
  if (Spark.class.isAssignableFrom(sparkPlugin.getClass())) {
    // TODO: Pass in a forwarding configurer so that we can capture the properties set by the plugin.
    // However, the usage is very limited as the plugin can always use plugin config to preserve properties.
    ((Spark) sparkPlugin).configure(getConfigurer());
  } else if (SparkMain.class.isAssignableFrom(sparkPlugin.getClass())) {
    setMainClass(ScalaSparkMainWrapper.class);
  } else {
    setMainClass(JavaSparkMainWrapper.class);
  }
  setName(phaseSpec.getPhaseName());
  Map<String, String> properties = new HashMap<>();
  properties.put(STAGE_NAME, stageSpec.getName());
  properties.put(Constants.PIPELINEID, GSON.toJson(phaseSpec, BatchPhaseSpec.class));
  setProperties(properties);
}
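For orientation, here is a minimal sketch (an assumption, not code from the wrapper classes) of how a runtime wrapper could read these properties back, given a JavaSparkExecutionContext named sec, a Gson instance named GSON, and assuming the STAGE_NAME and PIPELINEID keys are accessible:
// Hedged sketch: recover the stage name and serialized BatchPhaseSpec stored by configure() above.
Map<String, String> props = sec.getSpecification().getProperties();
String stageName = props.get(ExternalSparkProgram.STAGE_NAME);
BatchPhaseSpec phaseSpec = GSON.fromJson(props.get(Constants.PIPELINEID), BatchPhaseSpec.class);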
use of co.cask.cdap.api.plugin.PluginProperties in project cdap by caskdata.
the class PluginInstantiator, method newInstance():
/**
* Creates a new instance of the given plugin class with all property macros substituted if a MacroEvaluator is given.
* At runtime, plugin property fields that are macro-enabled and contain macro syntax will remain in the macroFields
* set in the plugin config.
* @param plugin {@link Plugin}
* @param macroEvaluator the MacroEvaluator that performs macro substitution
* @param <T> Type of the plugin
* @return a new plugin instance with macros substituted
* @throws IOException if failed to expand the plugin jar to create the plugin ClassLoader
* @throws ClassNotFoundException if failed to load the given plugin class
*/
public <T> T newInstance(Plugin plugin, @Nullable MacroEvaluator macroEvaluator)
  throws IOException, ClassNotFoundException, InvalidMacroException {
  ClassLoader classLoader = getPluginClassLoader(plugin);
  PluginClass pluginClass = plugin.getPluginClass();
  @SuppressWarnings("unchecked")
  TypeToken<T> pluginType = TypeToken.of((Class<T>) classLoader.loadClass(pluginClass.getClassName()));
  try {
    String configFieldName = pluginClass.getConfigFieldName();
    // Plugin doesn't have config. Simply return a new instance.
    if (configFieldName == null) {
      return instantiatorFactory.get(pluginType).create();
    }
    // Create the config instance
    Field field = Fields.findField(pluginType.getType(), configFieldName);
    TypeToken<?> configFieldType = pluginType.resolveType(field.getGenericType());
    Object config = instantiatorFactory.get(configFieldType).create();
    // perform macro substitution if an evaluator is provided, collect fields with macros only at configure time
    PluginProperties pluginProperties = substituteMacros(plugin, macroEvaluator);
    Set<String> macroFields = (macroEvaluator == null) ? getFieldsWithMacro(plugin) : Collections.emptySet();
    Reflections.visit(config, configFieldType.getType(),
                      new ConfigFieldSetter(pluginClass, plugin.getArtifactId(), pluginProperties, macroFields));
    // Create the plugin instance
    return newInstance(pluginType, field, configFieldType, config);
  } catch (NoSuchFieldException e) {
    throw new InvalidPluginConfigException("Config field not found in plugin class: " + pluginClass, e);
  } catch (IllegalAccessException e) {
    throw new InvalidPluginConfigException("Failed to set plugin config field: " + pluginClass, e);
  }
}
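For a concrete picture of what this reflection works against, here is a hypothetical plugin (an illustrative assumption, not a class from the project) using PluginConfig from co.cask.cdap.api.plugin and the @Plugin/@Name/@Description/@Macro annotations from co.cask.cdap.api.annotation; its PluginClass would report "config" as the config field name:
@Plugin(type = "sparkprogram")
@Name("example")
public class ExamplePlugin {
  // PluginClass.getConfigFieldName() would return "config"; newInstance() instantiates this
  // object and fills its fields from the (macro-substituted) plugin properties.
  private Conf config;

  public static class Conf extends PluginConfig {
    @Macro
    @Description("May contain ${...} syntax, which stays unresolved at configure time")
    private String host;

    @Nullable
    private String timeout;
  }
}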
use of co.cask.cdap.api.plugin.PluginProperties in project cdap by caskdata.
the class ArtifactRepositoryTest, method testPluginProperties():
@Test
public void testPluginProperties() throws Exception {
  PluginProperties pluginProperties = PluginProperties.builder()
    .add("class.name", TEST_EMPTY_CLASS)
    .add("timeout", "10")
    .add("name", "${macro}")
    .build();
  Assert.assertTrue(pluginProperties.getMacros().getLookups().isEmpty());
  Set<String> lookups = new HashSet<>();
  lookups.add("macro");
  PluginProperties updatedPluginProperties = pluginProperties.setMacros(new Macros(lookups, new HashSet<MacroFunction>()));
  // setMacros returns a new PluginProperties; the original instance is left unchanged
  Assert.assertTrue(pluginProperties.getMacros().getLookups().isEmpty());
  Assert.assertEquals(lookups, updatedPluginProperties.getMacros().getLookups());
  Assert.assertTrue(updatedPluginProperties.getMacros().getMacroFunctions().isEmpty());
}
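The lookups recorded in Macros are what a MacroEvaluator resolves at runtime. A minimal map-backed evaluator could look like the following sketch (an illustration assuming the MacroEvaluator interface's lookup/evaluate methods; it is not the TestMacroEvaluator used elsewhere in this test class):
MacroEvaluator evaluator = new MacroEvaluator() {
  private final Map<String, String> values = ImmutableMap.of("macro", "resolved-value");

  @Override
  public String lookup(String property) {
    String value = values.get(property);
    if (value == null) {
      throw new InvalidMacroException("No substitution found for macro '" + property + "'");
    }
    return value;
  }

  @Override
  public String evaluate(String macroFunction, String... args) {
    throw new InvalidMacroException("Macro functions are not supported in this sketch");
  }
};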
use of co.cask.cdap.api.plugin.PluginProperties in project cdap by caskdata.
the class ArtifactRepositoryTest, method testMacroPlugin():
@Test
public void testMacroPlugin() throws Exception {
  File pluginDir = TMP_FOLDER.newFolder();
  addPluginArtifact();
  SortedMap<ArtifactDescriptor, Set<PluginClass>> plugins = getPlugins();
  copyArtifacts(pluginDir, plugins);
  // set up test macro evaluator's substitutions
  Map<String, String> propertySubstitutions = ImmutableMap.<String, String>builder()
    .put("expansiveHostname", "${hostname}/${path}:${port}")
    .put("hostname", "${one}").put("path", "${two}").put("port", "${three}")
    .put("one", "${host${hostScopeMacro}}").put("hostScopeMacro", "-local")
    .put("host-local", "${l}${o}${c}${a}${l}${hostSuffix}")
    .put("l", "l").put("o", "o").put("c", "c").put("a", "a").put("hostSuffix", "host")
    .put("two", "${filename${fileTypeMacro}}")
    .put("three", "${firstPortDigit}${secondPortDigit}")
    .put("filename", "index").put("fileTypeMacro", "-html")
    .put("filename-html", "index.html").put("filename-php", "index.php")
    .put("firstPortDigit", "8").put("secondPortDigit", "0")
    .put("aBoolean", "true").put("aByte", "101").put("aChar", "k")
    .put("aDouble", "64.0").put("aFloat", "52.0")
    .put("anInt", "42").put("aLong", "32").put("aShort", "81")
    .build();
  // Instantiate the plugins and execute them
  try (PluginInstantiator instantiator = new PluginInstantiator(cConf, appClassLoader, pluginDir)) {
    for (Map.Entry<ArtifactDescriptor, Set<PluginClass>> entry : plugins.entrySet()) {
      for (PluginClass pluginClass : entry.getValue()) {
        PluginProperties rawProperties = PluginProperties.builder()
          .add("class.name", TEST_EMPTY_CLASS).add("nullableLongFlag", "10")
          .add("host", "${expansiveHostname}")
          .add("aBoolean", "${aBoolean}").add("aByte", "${aByte}").add("aChar", "${aChar}")
          .add("aDouble", "${aDouble}").add("anInt", "${anInt}").add("aFloat", "${aFloat}")
          .add("aLong", "${aLong}").add("aShort", "${aShort}")
          .build();
        Plugin pluginInfo = new Plugin(new ArrayList<ArtifactId>(), entry.getKey().getArtifactId(),
                                       pluginClass, rawProperties);
        TestMacroEvaluator testMacroEvaluator = new TestMacroEvaluator(propertySubstitutions, new HashMap<String, String>());
        Callable<String> plugin = instantiator.newInstance(pluginInfo, testMacroEvaluator);
        Assert.assertEquals("localhost/index.html:80,true,101,k,64.0,52.0,42,32,81", plugin.call());
        String pluginId = "5";
        PluginContext pluginContext = new DefaultPluginContext(instantiator, NamespaceId.DEFAULT.app("abc").worker("w"),
                                                               ImmutableMap.of(pluginId, pluginInfo));
        PluginProperties resolvedProperties = pluginContext.getPluginProperties(pluginId, testMacroEvaluator);
        Map<String, String> expected = new HashMap<>();
        expected.put("class.name", TEST_EMPTY_CLASS);
        expected.put("nullableLongFlag", "10");
        expected.put("host", "localhost/index.html:80");
        expected.put("aBoolean", "true");
        expected.put("aByte", "101");
        expected.put("aChar", "k");
        expected.put("aDouble", "64.0");
        expected.put("anInt", "42");
        expected.put("aFloat", "52.0");
        expected.put("aLong", "32");
        expected.put("aShort", "81");
        Assert.assertEquals(expected, resolvedProperties.getProperties());
      }
    }
  }
}
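To see why the test expects "localhost/index.html:80" for the host property, this is the chain of substitutions the evaluator performs for ${expansiveHostname}, using the propertySubstitutions map above:
${expansiveHostname}
  -> ${hostname}/${path}:${port}
  -> ${one}/${two}:${three}
  -> ${host${hostScopeMacro}}/${filename${fileTypeMacro}}:${firstPortDigit}${secondPortDigit}
  -> ${host-local}/${filename-html}:80
  -> ${l}${o}${c}${a}${l}${hostSuffix}/index.html:80
  -> localhost/index.html:80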
use of co.cask.cdap.api.plugin.PluginProperties in project cdap by caskdata.
the class JavaSparkMainWrapper, method getProgramArgs():
@Nullable
private String getProgramArgs(JavaSparkExecutionContext sec, String stageName) {
  // get program args from plugin properties
  PluginProperties pluginProperties = sec.getPluginContext().getPluginProperties(stageName);
  String programArgs = pluginProperties == null
    ? null
    : pluginProperties.getProperties().get(ExternalSparkProgram.PROGRAM_ARGS);
  // can be overridden by runtime args
  String programArgsKey = stageName + "." + ExternalSparkProgram.PROGRAM_ARGS;
  if (sec.getRuntimeArguments().containsKey(programArgsKey)) {
    programArgs = sec.getRuntimeArguments().get(programArgsKey);
  }
  return programArgs;
}
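The resolved string still needs to be turned into the String[] that the delegate program's main method expects. One plausible approach is a simple whitespace split, shown here as an illustrative sketch rather than the wrapper's actual logic:
String programArgs = getProgramArgs(sec, stageName);
String[] args = (programArgs == null || programArgs.trim().isEmpty())
  ? new String[0]
  : programArgs.trim().split("\\s+");
// args can then be passed to the delegate program's main(String[]) method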