Usage of org.talend.hadoop.distribution.condition.MultiComponentCondition in the project tbd-studio-se by Talend.
The following excerpt is the getModuleGroups method of the DynamicHDPSparkBatchModuleGroup class.
/**
 * Collects the runtime module groups required by this HDP Spark Batch distribution.
 *
 * <p>The set starts from the groups declared by the parent implementation, then adds one
 * {@link DistributionModuleGroup} per (runtime id, condition) pair for every template module
 * group that resolves to a non-blank runtime id through the dynamic plugin adapter.
 *
 * @return the accumulated set of module groups (never {@code null})
 * @throws Exception propagated from the parent lookup or the plugin adapter
 */
@Override
public Set<DistributionModuleGroup> getModuleGroups() throws Exception {
    Set<DistributionModuleGroup> moduleGroups = new HashSet<>();
    // Start from the groups contributed by the parent distribution, if any.
    Set<DistributionModuleGroup> moduleGroupsFromSuper = super.getModuleGroups();
    if (moduleGroupsFromSuper != null && !moduleGroupsFromSuper.isEmpty()) {
        moduleGroups.addAll(moduleGroupsFromSuper);
    }
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    // Resolve the runtime module-group id of each template group referenced below.
    String spark2RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK2_MODULE_GROUP.getModuleName());
    String sparkMRRequiredRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK_MRREQUIRED_MODULE_GROUP.getModuleName());
    String hdfsRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HDFS_MODULE_GROUP.getModuleName());
    String hdfsNotSpark16RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HDFS_NOT_SPARK_1_6_MODULE_GROUP.getModuleName());
    String tezNotSpark16RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.TEZ_NOT_SPARK_1_6_MODULE_GROUP.getModuleName());
    String mapReduceRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.MAPREDUCE_MODULE_GROUP.getModuleName());
    String atlasSpark1RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.ATLAS_SPARK_1_MODULE_GROUP.getModuleName());
    String atlasSpark2RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.ATLAS_SPARK_2_MODULE_GROUP.getModuleName());
    String sqoopRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SQOOP_MODULE_GROUP.getModuleName());
    String sqoopParquetRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SQOOP_PARQUET_MODULE_GROUP.getModuleName());
    String hBaseRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HBASE_MODULE_GROUP.getModuleName());
    String sparkS3RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK_S3_MRREQUIRED_MODULE_GROUP.getModuleName());
    // Validate every resolved id. Ids are still re-tested for blankness before use, so
    // checkRuntimeId presumably reports rather than throws — TODO confirm.
    // NOTE(review): spark2RuntimeId is resolved and checked but never added to any module
    // group below — confirm whether that omission is intentional.
    checkRuntimeId(spark2RuntimeId);
    checkRuntimeId(sparkMRRequiredRuntimeId);
    checkRuntimeId(hdfsRuntimeId);
    checkRuntimeId(hdfsNotSpark16RuntimeId);
    checkRuntimeId(tezNotSpark16RuntimeId);
    checkRuntimeId(mapReduceRuntimeId);
    checkRuntimeId(atlasSpark1RuntimeId);
    checkRuntimeId(atlasSpark2RuntimeId);
    checkRuntimeId(sqoopRuntimeId);
    checkRuntimeId(sqoopParquetRuntimeId);
    checkRuntimeId(hBaseRuntimeId);
    checkRuntimeId(sparkS3RuntimeId);
    // Atlas groups apply only when the USE_ATLAS flag is set, further restricted by Spark major version.
    ComponentCondition useAtlas = new SimpleComponentCondition(new BasicExpression(MRConstant.USE_ATLAS));
    ComponentCondition atlasSpark1x = new MultiComponentCondition(useAtlas, BooleanOperator.AND, conditionSpark1);
    ComponentCondition atlasSpark2x = new MultiComponentCondition(useAtlas, BooleanOperator.AND, conditionSpark2);
    // Register each group once per condition under which it is needed.
    addGroupForEachCondition(moduleGroups, sparkMRRequiredRuntimeId, true, conditionSpark1, conditionSpark2);
    addGroupForEachCondition(moduleGroups, hdfsRuntimeId, false, conditionSpark1, conditionSpark2);
    addGroupForEachCondition(moduleGroups, hdfsNotSpark16RuntimeId, false, conditionNotSpark16);
    addGroupForEachCondition(moduleGroups, tezNotSpark16RuntimeId, false, conditionNotSpark16);
    addGroupForEachCondition(moduleGroups, mapReduceRuntimeId, false, conditionSpark1, conditionSpark2);
    addGroupForEachCondition(moduleGroups, atlasSpark1RuntimeId, true, atlasSpark1x);
    addGroupForEachCondition(moduleGroups, atlasSpark2RuntimeId, true, atlasSpark2x);
    addGroupForEachCondition(moduleGroups, sqoopRuntimeId, false, conditionSpark1, conditionSpark2);
    addGroupForEachCondition(moduleGroups, sqoopParquetRuntimeId, false, conditionSpark1, conditionSpark2);
    addGroupForEachCondition(moduleGroups, hBaseRuntimeId, true, conditionSpark1, conditionSpark2);
    if (StringUtils.isNotBlank(sparkS3RuntimeId)) {
        // The S3 group is unconditional: no spark-version condition is attached.
        moduleGroups.add(new DistributionModuleGroup(sparkS3RuntimeId, true));
    }
    return moduleGroups;
}

/**
 * Adds one {@link DistributionModuleGroup} per given condition when {@code runtimeId} is
 * non-blank; does nothing otherwise.
 *
 * @param groups     the accumulator to add to
 * @param runtimeId  resolved runtime module-group id (may be blank/null)
 * @param mrRequired the "MR required" flag forwarded to each created group
 * @param conditions one condition per group instance to create
 */
private void addGroupForEachCondition(Set<DistributionModuleGroup> groups, String runtimeId, boolean mrRequired,
        ComponentCondition... conditions) {
    if (StringUtils.isNotBlank(runtimeId)) {
        for (ComponentCondition condition : conditions) {
            groups.add(new DistributionModuleGroup(runtimeId, mrRequired, condition));
        }
    }
}
Usage of org.talend.hadoop.distribution.condition.MultiComponentCondition in the project tbd-studio-se by Talend.
The following excerpt is the getDisplayShowIf method of the DistributionVersion class.
/**
 * Returns the "show if" expression string used to decide whether this distribution version
 * is displayed.
 *
 * <p>If the display condition is a plain boolean condition, the literal {@code "true"} or
 * {@code "false"} is returned directly; otherwise a composed condition of the form
 * {@code DISTRIBUTION=='name' AND (displayCondition)} is built and rendered.
 *
 * @return the expression string driving the version's visibility
 */
public String getDisplayShowIf() {
    final Expression trueExp = new BooleanExpression(true);
    final Expression falseExp = new BooleanExpression(false);
    ComponentCondition additionalCondition = displayCondition;
    if (additionalCondition != null) {
        // Hoisted: the original re-evaluated getConditionString()/getExpressionString()
        // up to four times for the same comparison.
        final String conditionString = additionalCondition.getConditionString();
        final String trueString = trueExp.getExpressionString();
        if (trueString.equals(conditionString) || falseExp.getExpressionString().equals(conditionString)) {
            // Don't show a version if its display condition is a BooleanCondition.
            return trueString.equals(conditionString) ? Boolean.TRUE.toString() : Boolean.FALSE.toString();
        }
    }
    // Compose the ComponentCondition to display a version.
    ComponentCondition condition;
    org.talend.hadoop.distribution.condition.Expression e = new BasicExpression(distribution.componentType.getDistributionParameter(), EqualityOperator.EQ, distribution.name);
    if (additionalCondition != null) {
        condition = new MultiComponentCondition(new SimpleComponentCondition(e), BooleanOperator.AND, new NestedComponentCondition(additionalCondition));
    } else {
        condition = new SimpleComponentCondition(e);
    }
    return condition.getConditionString();
}
Usage of org.talend.hadoop.distribution.condition.MultiComponentCondition in the project tbd-studio-se by Talend.
The following excerpt is the init method of the DynamicCDHSparkStreamingModuleGroup class.
/**
 * Initializes the Spark-version conditions used by this CDH Spark Streaming module group.
 */
@Override
protected void init() {
    // Spark 1.x is matched by exactly one supported version: 1.6.
    spark1Condition = new SimpleComponentCondition(
            new BasicExpression("SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, ESparkVersion.SPARK_1_6.getSparkVersion())); // $NON-NLS-1$
    // Spark 2.x covers both 2.2 and 2.4.x, hence the OR composition.
    BasicExpression spark22Expression =
            new BasicExpression("SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, ESparkVersion.SPARK_2_2.getSparkVersion()); // $NON-NLS-1$
    BasicExpression spark24Expression =
            new BasicExpression("SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, ESparkVersion.SPARK_2_4_X.getSparkVersion()); // $NON-NLS-1$
    spark2Condition = new MultiComponentCondition(spark22Expression, BooleanOperator.OR, spark24Expression);
}
Usage of org.talend.hadoop.distribution.condition.MultiComponentCondition in the project tbd-studio-se by Talend.
The following excerpt is the getModuleGroups method of the DynamicSparkStreamingKinesisNodeModuleGroup class.
/**
 * Builds the module groups needed by the Kinesis Spark Streaming node.
 *
 * <p>A single MR-required group is produced for the Spark 2 Kinesis runtime module, guarded
 * by the Spark Streaming linked-node condition combined (AND) with the Spark 2 condition.
 *
 * @param distribution the distribution name used to build the linked-node condition
 * @param version      the distribution version used to build the linked-node condition
 * @return the resulting set (empty when the runtime id cannot be resolved)
 * @throws Exception propagated from the plugin adapter lookup
 */
@Override
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) throws Exception {
    Set<DistributionModuleGroup> result = new HashSet<>();
    DynamicPluginAdapter adapter = getPluginAdapter();
    String kinesisRuntimeId = adapter.getRuntimeModuleGroupIdByTemplateId(
            DynamicModuleGroupConstant.SPARK2_KINESIS_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(kinesisRuntimeId);
    if (StringUtils.isNotBlank(kinesisRuntimeId)) {
        // Restrict the group to Spark 2 jobs whose linked Spark configuration matches.
        SparkStreamingLinkedNodeCondition linkedNodeCondition =
                new SparkStreamingLinkedNodeCondition(distribution, version);
        MultiComponentCondition spark2OnLinkedNode = new MultiComponentCondition(
                linkedNodeCondition.getCondition(), BooleanOperator.AND, spark2Condition);
        result.add(new DistributionModuleGroup(kinesisRuntimeId, true,
                new NestedComponentCondition(spark2OnLinkedNode)));
    }
    return result;
}
Usage of org.talend.hadoop.distribution.condition.MultiComponentCondition in the project tbd-studio-se by Talend.
The following excerpt is the init method of the DynamicSparkStreamingKafkaClientModuleGroup class.
/**
 * Initializes the Spark-version conditions evaluated against the Spark configuration
 * node linked to the Kafka component.
 */
protected void init() {
    // Spark 1.x: the linked Spark configuration declares version 1.6.
    spark1Condition = new SimpleComponentCondition(new LinkedNodeExpression(
            SparkStreamingConstant.KAFKA_SPARKCONFIGURATION_LINKEDPARAMETER,
            "SUPPORTED_SPARK_VERSION", // $NON-NLS-1$
            EqualityOperator.EQ,
            ESparkVersion.SPARK_1_6.getSparkVersion()));
    // Spark 2.x: version 2.2 or 2.4.x on the linked Spark configuration.
    LinkedNodeExpression spark22Expression = new LinkedNodeExpression(
            SparkStreamingConstant.KAFKA_SPARKCONFIGURATION_LINKEDPARAMETER,
            "SUPPORTED_SPARK_VERSION", // $NON-NLS-1$
            EqualityOperator.EQ,
            ESparkVersion.SPARK_2_2.getSparkVersion());
    LinkedNodeExpression spark24Expression = new LinkedNodeExpression(
            SparkStreamingConstant.KAFKA_SPARKCONFIGURATION_LINKEDPARAMETER,
            "SUPPORTED_SPARK_VERSION", // $NON-NLS-1$
            EqualityOperator.EQ,
            ESparkVersion.SPARK_2_4_X.getSparkVersion());
    spark2Condition = new MultiComponentCondition(spark22Expression, BooleanOperator.OR, spark24Expression);
}
Aggregations — usages of MultiComponentCondition collected from across the project.