use of org.talend.hadoop.distribution.condition.MultiComponentCondition in project tbd-studio-se by Talend.
the class DynamicHDPGraphFramesNodeModuleGroup method getModuleGroups.
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version, String condition) throws Exception {
    Set<DistributionModuleGroup> dmg = new HashSet<>();
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    ComponentCondition spark1Condition = getComponentCondition(ESparkVersion.SPARK_1_6.getSparkVersion()); // $NON-NLS-1$
    ComponentCondition spark2Condition = getComponentCondition(ESparkVersion.SPARK_2_1.getSparkVersion()); // $NON-NLS-1$
    if (condition != null) {
        ComponentCondition c = new SimpleComponentCondition(new RawExpression(condition));
        spark1Condition = new MultiComponentCondition(spark1Condition, BooleanOperator.AND, c);
        spark2Condition = new MultiComponentCondition(spark2Condition, BooleanOperator.AND, c);
    }
    String graphFramesMrRequiredRuntimeId = pluginAdapter
            .getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.GRAPHFRAMES_MRREQUIRED_MODULE_GROUP.getModuleName());
    String graphFramesSpark2MrRequiredRuntimeId = pluginAdapter
            .getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.GRAPHFRAMES_SPARK2_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(graphFramesMrRequiredRuntimeId);
    checkRuntimeId(graphFramesSpark2MrRequiredRuntimeId);
    if (StringUtils.isNotBlank(graphFramesMrRequiredRuntimeId)) {
        dmg.addAll(ModuleGroupsUtils.getModuleGroups(distribution, version, spark1Condition, graphFramesMrRequiredRuntimeId, true));
    }
    if (StringUtils.isNotBlank(graphFramesSpark2MrRequiredRuntimeId)) {
        dmg.addAll(ModuleGroupsUtils.getModuleGroups(distribution, version, spark2Condition, graphFramesSpark2MrRequiredRuntimeId, true));
    }
    return dmg;
}
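The optional raw condition is simply AND-ed onto each Spark version condition. Below is a minimal sketch of that composition in isolation; getComponentCondition is not shown here, so the version condition is rebuilt with BasicExpression (as in DynamicSparkStreamingModuleGroup.init() further down), and the raw condition string is hypothetical, not taken from the Talend code.

// Sketch only: AND-composition of a Spark version condition with an extra raw condition.
// "USE_GRAPHFRAMES='true'" is an illustrative raw condition, not part of the Talend code.
ComponentCondition versionCondition = new SimpleComponentCondition(
        new BasicExpression("SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, ESparkVersion.SPARK_1_6.getSparkVersion())); // $NON-NLS-1$
ComponentCondition rawCondition = new SimpleComponentCondition(new RawExpression("USE_GRAPHFRAMES='true'"));
// With BooleanOperator.AND, the module group is only required when both operands hold.
ComponentCondition combined = new MultiComponentCondition(versionCondition, BooleanOperator.AND, rawCondition);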
use of org.talend.hadoop.distribution.condition.MultiComponentCondition in project tbd-studio-se by Talend.
the class DynamicSparkStreamingKafkaAssemblyModuleGroup method getModuleGroups.
@Override
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) throws Exception {
    Set<DistributionModuleGroup> hs = new HashSet<>();
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    String sparkKafkaAssemblyMrRequiredRuntimeId = pluginAdapter
            .getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK_KAFKA_ASSEMBLY_MRREQUIRED_MODULE_GROUP.getModuleName());
    String spark2KafkaAssemblyMrRequiredRuntimeId = pluginAdapter
            .getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK2_KAFKA_ASSEMBLY_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(sparkKafkaAssemblyMrRequiredRuntimeId);
    checkRuntimeId(spark2KafkaAssemblyMrRequiredRuntimeId);
    if (StringUtils.isNotBlank(sparkKafkaAssemblyMrRequiredRuntimeId)) {
        // Spark 1.6 Kafka assembly
        DistributionModuleGroup dmgSpark16 = new DistributionModuleGroup(sparkKafkaAssemblyMrRequiredRuntimeId, true,
                new NestedComponentCondition(new MultiComponentCondition(
                        new SparkStreamingLinkedNodeCondition(distribution, version,
                                SparkStreamingConstant.KAFKA_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition(),
                        BooleanOperator.AND, spark1Condition)));
        hs.add(dmgSpark16);
    }
    if (StringUtils.isNotBlank(spark2KafkaAssemblyMrRequiredRuntimeId)) {
        // Spark 2.1 Kafka assembly
        DistributionModuleGroup dmgSpark21 = new DistributionModuleGroup(spark2KafkaAssemblyMrRequiredRuntimeId, true,
                new NestedComponentCondition(new MultiComponentCondition(
                        new SparkStreamingLinkedNodeCondition(distribution, version,
                                SparkStreamingConstant.KAFKA_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition(),
                        BooleanOperator.AND, spark2Condition)));
        hs.add(dmgSpark21);
    }
    return hs;
}
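The constructor calls above pack several conditions into a single expression. Below is a readability sketch of the Spark 2.x branch broken into named steps; distribution, version, hs, spark2Condition and the runtime id are assumed to be in scope exactly as in the method above.

// Sketch only: the Spark 2.x branch of getModuleGroups, decomposed into intermediate conditions.
ComponentCondition kafkaLinkedNodeCondition = new SparkStreamingLinkedNodeCondition(
        distribution, version, SparkStreamingConstant.KAFKA_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition();
// Require the linked Spark configuration node AND a Spark version accepted by spark2Condition.
ComponentCondition spark2KafkaCondition = new NestedComponentCondition(
        new MultiComponentCondition(kafkaLinkedNodeCondition, BooleanOperator.AND, spark2Condition));
DistributionModuleGroup dmgSpark2 = new DistributionModuleGroup(spark2KafkaAssemblyMrRequiredRuntimeId, true, spark2KafkaCondition);
hs.add(dmgSpark2);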
use of org.talend.hadoop.distribution.condition.MultiComponentCondition in project tbd-studio-se by Talend.
the class DynamicSparkStreamingKafkaAssemblyModuleGroup method init.
protected void init() {
    spark1Condition = new SimpleComponentCondition(
            new LinkedNodeExpression(SparkStreamingConstant.KAFKA_SPARKCONFIGURATION_LINKEDPARAMETER,
                    "SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, ESparkVersion.SPARK_1_6.getSparkVersion())); // $NON-NLS-1$
    spark2Condition = new MultiComponentCondition(
            new LinkedNodeExpression(SparkStreamingConstant.KAFKA_SPARKCONFIGURATION_LINKEDPARAMETER,
                    "SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, ESparkVersion.SPARK_2_2.getSparkVersion()), // $NON-NLS-1$
            BooleanOperator.OR,
            new LinkedNodeExpression(SparkStreamingConstant.KAFKA_SPARKCONFIGURATION_LINKEDPARAMETER,
                    "SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, ESparkVersion.SPARK_2_4_X.getSparkVersion())); // $NON-NLS-1$
}
use of org.talend.hadoop.distribution.condition.MultiComponentCondition in project tbd-studio-se by Talend.
the class ModuleGroupsUtils method getStreamingModuleGroups.
/**
* Utility method to create the collection of {@link DistributionModuleGroup} with a condition made of a
* {@link SparkStreamingLinkedNodeCondition} and an additional raw condition.
*
* @param distribution the distribution key
* @param version the version key
* @param condition a nullable additional condition
* @param moduleName the module name
* @param mrRequired whether the module group is MR-required
* @return a set of {@link DistributionModuleGroup}
*/
public static Set<DistributionModuleGroup> getStreamingModuleGroups(String distribution, String version, ComponentCondition condition, String moduleName, boolean mrRequired) {
    Set<DistributionModuleGroup> hs = new HashSet<>();
    ComponentCondition distribCondition = new SparkStreamingLinkedNodeCondition(distribution, version).getCondition();
    ComponentCondition cc = null;
    if (condition != null) {
        cc = new MultiComponentCondition(condition, BooleanOperator.AND, distribCondition);
    } else {
        cc = distribCondition;
    }
    DistributionModuleGroup dmg = new DistributionModuleGroup(moduleName, mrRequired, cc);
    hs.add(dmg);
    return hs;
}
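A minimal usage sketch of this utility; the module name and the raw condition string are hypothetical, and distribution and version are assumed to be in scope.

// Hypothetical caller of getStreamingModuleGroups; the module name and raw condition are illustrative only.
ComponentCondition extra = new SimpleComponentCondition(new RawExpression("USE_KRB='true'"));
Set<DistributionModuleGroup> withExtra = ModuleGroupsUtils.getStreamingModuleGroups(
        distribution, version, extra, "spark-streaming-kafka-assembly-module-group", true);
// Passing null leaves the group guarded by the SparkStreamingLinkedNodeCondition alone.
Set<DistributionModuleGroup> distribOnly = ModuleGroupsUtils.getStreamingModuleGroups(
        distribution, version, null, "spark-streaming-kafka-assembly-module-group", true);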
use of org.talend.hadoop.distribution.condition.MultiComponentCondition in project tbd-studio-se by Talend.
the class DynamicSparkStreamingModuleGroup method init.
protected void init() {
    spark1Condition = new SimpleComponentCondition(
            new BasicExpression("SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, ESparkVersion.SPARK_1_6.getSparkVersion())); // $NON-NLS-1$
    spark2Condition = new MultiComponentCondition(
            new BasicExpression("SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, ESparkVersion.SPARK_2_0.getSparkVersion()), // $NON-NLS-1$
            BooleanOperator.OR,
            new MultiComponentCondition(
                    new BasicExpression("SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, ESparkVersion.SPARK_2_1.getSparkVersion()), // $NON-NLS-1$
                    BooleanOperator.OR,
                    new MultiComponentCondition(
                            new BasicExpression("SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, ESparkVersion.SPARK_2_2.getSparkVersion()), // $NON-NLS-1$
                            BooleanOperator.OR,
                            new MultiComponentCondition(
                                    new BasicExpression("SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, ESparkVersion.SPARK_2_3_X.getSparkVersion()), // $NON-NLS-1$
                                    BooleanOperator.OR,
                                    new BasicExpression("SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, ESparkVersion.SPARK_2_4_X.getSparkVersion()))))); // $NON-NLS-1$
}
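The init() above spells out the right-nested OR chain version by version. Below is a hypothetical helper, not part of the Talend code, that folds a list of ESparkVersion values into the same chain using only the constructor shapes already shown above.

// Hypothetical helper: builds the same right-nested OR chain as init() from a list of versions.
// Assumes at least two versions, matching the hand-written pattern above.
private static MultiComponentCondition anySparkVersion(List<ESparkVersion> versions) {
    int last = versions.size() - 1;
    MultiComponentCondition chain = new MultiComponentCondition(
            sparkVersionExpression(versions.get(last - 1)), BooleanOperator.OR, sparkVersionExpression(versions.get(last)));
    for (int i = last - 2; i >= 0; i--) {
        chain = new MultiComponentCondition(sparkVersionExpression(versions.get(i)), BooleanOperator.OR, chain);
    }
    return chain;
}

private static BasicExpression sparkVersionExpression(ESparkVersion version) {
    return new BasicExpression("SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, version.getSparkVersion()); // $NON-NLS-1$
}

With this helper, the spark2Condition above would be anySparkVersion(Arrays.asList(ESparkVersion.SPARK_2_0, ESparkVersion.SPARK_2_1, ESparkVersion.SPARK_2_2, ESparkVersion.SPARK_2_3_X, ESparkVersion.SPARK_2_4_X)).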