Use of org.talend.hadoop.distribution.condition.common.SparkStreamingLinkedNodeCondition in project tbd-studio-se by Talend.
The class DynamicHDPSparkStreamingKafkaClientModuleGroup, method getModuleGroups:
@Override
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) throws Exception {
    Set<DistributionModuleGroup> hs = new HashSet<>();
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    String sparkKafkaClientMrRequiredRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(
            DynamicModuleGroupConstant.SPARK_KAFKA_CLIENT_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(sparkKafkaClientMrRequiredRuntimeId);
    if (StringUtils.isNotBlank(sparkKafkaClientMrRequiredRuntimeId)) {
        // Spark 1
        DistributionModuleGroup dmgSpark1 = new DistributionModuleGroup(sparkKafkaClientMrRequiredRuntimeId, true,
                new SparkStreamingLinkedNodeCondition(distribution, version,
                        SparkStreamingConstant.KAFKA_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition());
        hs.add(dmgSpark1);
    }
    return hs;
}
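Every method on this page follows the same shape: resolve a runtime module group id through the DynamicPluginAdapter, validate it, and only then register a DistributionModuleGroup whose visibility is driven by a SparkStreamingLinkedNodeCondition. A minimal usage sketch is shown below; how the module group instance is constructed is not part of the snippet above, and the "HDP"/"3.1" literals are placeholders, not values taken from tbd-studio-se.

// Hypothetical usage sketch: kafkaClientModuleGroup is assumed to be an already
// constructed DynamicHDPSparkStreamingKafkaClientModuleGroup instance.
Set<DistributionModuleGroup> groups = kafkaClientModuleGroup.getModuleGroups("HDP", "3.1");
for (DistributionModuleGroup group : groups) {
    // Each entry pairs a runtime module group id with the condition under which
    // the Kafka client jars become required by a Spark Streaming job.
    System.out.println(group);
}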
Use of org.talend.hadoop.distribution.condition.common.SparkStreamingLinkedNodeCondition in project tbd-studio-se by Talend.
The class DynamicSparkStreamingFlumeNodeModuleGroup, method getModuleGroups:
@Override
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) throws Exception {
    Set<DistributionModuleGroup> hs = new HashSet<>();
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    String sparkFlumeMrRequiredRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(
            DynamicModuleGroupConstant.SPARK_FLUME_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(sparkFlumeMrRequiredRuntimeId);
    if (StringUtils.isNotBlank(sparkFlumeMrRequiredRuntimeId)) {
        DistributionModuleGroup dmg = new DistributionModuleGroup(sparkFlumeMrRequiredRuntimeId, true,
                new SparkStreamingLinkedNodeCondition(distribution, version,
                        SparkStreamingConstant.FLUME_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition());
        hs.add(dmg);
    }
    return hs;
}
Use of org.talend.hadoop.distribution.condition.common.SparkStreamingLinkedNodeCondition in project tbd-studio-se by Talend.
The class DynamicSparkStreamingKinesisNodeModuleGroup, method getModuleGroups:
@Override
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) throws Exception {
    Set<DistributionModuleGroup> hs = new HashSet<>();
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    String spark2KinesisMrRequiredRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(
            DynamicModuleGroupConstant.SPARK2_KINESIS_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(spark2KinesisMrRequiredRuntimeId);
    if (StringUtils.isNotBlank(spark2KinesisMrRequiredRuntimeId)) {
        DistributionModuleGroup dmgSpark2 = new DistributionModuleGroup(spark2KinesisMrRequiredRuntimeId, true,
                new NestedComponentCondition(new MultiComponentCondition(
                        new SparkStreamingLinkedNodeCondition(distribution, version).getCondition(),
                        BooleanOperator.AND, spark2Condition)));
        hs.add(dmgSpark2);
    }
    return hs;
}
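Note that the Kinesis group uses the two-argument SparkStreamingLinkedNodeCondition(distribution, version) constructor and combines it with a spark2Condition field defined elsewhere in the class (not shown in this snippet), whereas the Kafka, Flume, and S3 groups pass a third argument naming the linked Spark configuration parameter. A hedged side-by-side sketch of the two forms follows, using only names that already appear on this page; the ComponentCondition type of the result is inferred from how it is passed to DistributionModuleGroup and is an assumption.

// Two-argument form: the condition depends only on distribution and version
// (as used for the Kinesis group above).
ComponentCondition plain = new SparkStreamingLinkedNodeCondition(distribution, version).getCondition();

// Three-argument form: additionally tied to the Spark configuration linked
// parameter of the consuming component (as used for the Kafka, Flume, and S3 groups).
ComponentCondition linked = new SparkStreamingLinkedNodeCondition(distribution, version,
        SparkStreamingConstant.KAFKA_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition();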
Use of org.talend.hadoop.distribution.condition.common.SparkStreamingLinkedNodeCondition in project tbd-studio-se by Talend.
The class DynamicSparkStreamingS3NodeModuleGroup, method getModuleGroups:
@Override
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) throws Exception {
    Set<DistributionModuleGroup> hs = new HashSet<>();
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    String sparkS3MrRequiredRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(
            DynamicModuleGroupConstant.SPARK_S3_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(sparkS3MrRequiredRuntimeId);
    if (StringUtils.isNotBlank(sparkS3MrRequiredRuntimeId)) {
        DistributionModuleGroup dmg = new DistributionModuleGroup(sparkS3MrRequiredRuntimeId, true,
                new SparkStreamingLinkedNodeCondition(distribution, version,
                        SparkStreamingConstant.S3_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition());
        hs.add(dmg);
    }
    return hs;
}
Use of org.talend.hadoop.distribution.condition.common.SparkStreamingLinkedNodeCondition in project tbd-studio-se by Talend.
The class DynamicSparkStreamingKafkaClientModuleGroup, method getModuleGroups:
@Override
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) throws Exception {
    Set<DistributionModuleGroup> hs = new HashSet<>();
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    String sparkKafkaClientMrRequiredRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(
            DynamicModuleGroupConstant.SPARK_KAFKA_CLIENT_MRREQUIRED_MODULE_GROUP.getModuleName());
    String spark2KafkaClientMrRequiredRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(
            DynamicModuleGroupConstant.SPARK2_KAFKA_CLIENT_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(sparkKafkaClientMrRequiredRuntimeId);
    checkRuntimeId(spark2KafkaClientMrRequiredRuntimeId);
    if (StringUtils.isNotBlank(sparkKafkaClientMrRequiredRuntimeId)) {
        // Spark 1
        DistributionModuleGroup dmgSpark1 = new DistributionModuleGroup(sparkKafkaClientMrRequiredRuntimeId, true,
                new NestedComponentCondition(new MultiComponentCondition(
                        new SparkStreamingLinkedNodeCondition(distribution, version,
                                SparkStreamingConstant.KAFKA_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition(),
                        BooleanOperator.AND, spark1Condition)));
        hs.add(dmgSpark1);
    }
    if (StringUtils.isNotBlank(spark2KafkaClientMrRequiredRuntimeId)) {
        // Spark 2
        DistributionModuleGroup dmgSpark2 = new DistributionModuleGroup(spark2KafkaClientMrRequiredRuntimeId, true,
                new NestedComponentCondition(new MultiComponentCondition(
                        new SparkStreamingLinkedNodeCondition(distribution, version,
                                SparkStreamingConstant.KAFKA_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition(),
                        BooleanOperator.AND, spark2Condition)));
        hs.add(dmgSpark2);
    }
    return hs;
}
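The five methods above differ only in which DynamicModuleGroupConstant template id they resolve and which condition they attach to the resulting DistributionModuleGroup. As a sketch only (the helper name, its signature, and the ComponentCondition parameter type are assumptions, not code from the project), the shared logic could be factored out along these lines:

// Hedged consolidation of the pattern repeated in the snippets above; only calls
// already shown on this page are used, the helper itself is hypothetical.
private void addModuleGroupIfResolvable(Set<DistributionModuleGroup> hs, DynamicPluginAdapter pluginAdapter,
        String templateId, ComponentCondition condition) throws Exception {
    // Resolve the runtime module group id registered for this template id.
    String runtimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(templateId);
    checkRuntimeId(runtimeId);
    if (StringUtils.isNotBlank(runtimeId)) {
        // The mrRequired flag is true for every group on this page.
        hs.add(new DistributionModuleGroup(runtimeId, true, condition));
    }
}

Each getModuleGroups body would then reduce to one call per Spark version, passing the relevant template id and the composed condition.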