Usage of org.talend.hadoop.distribution.condition.common.SparkBatchLinkedNodeCondition in the tbd-studio-se project by Talend: class ModuleGroupsUtilsTest, method getModuleGroupsTest_withoutAdditionCondition.
@Test
public void getModuleGroupsTest_withoutAdditionCondition() {
// Fix: ModuleGroupsUtils.getModuleGroups takes a ComponentCondition, not a String;
// a String-typed null would not compile against that signature.
ComponentCondition condition = null;
Set<DistributionModuleGroup> groups = ModuleGroupsUtils.getModuleGroups(distribution, version, condition, groupName, true);
// JUnit convention: expected value first, actual second — otherwise failure
// messages report the values swapped.
assertEquals(1, groups.size());
DistributionModuleGroup dmg = groups.iterator().next();
SparkBatchLinkedNodeCondition underlyingCondition = new SparkBatchLinkedNodeCondition(distribution, version);
assertEquals(groupName, dmg.getModuleName());
// Without an additional condition, the group's required-if clause is exactly the
// plain SparkBatchLinkedNodeCondition for this distribution/version.
assertEquals(underlyingCondition.getCondition().getConditionString(), dmg.getRequiredIf().getConditionString());
}
Usage of org.talend.hadoop.distribution.condition.common.SparkBatchLinkedNodeCondition in the tbd-studio-se project by Talend: class EMR5290SparkBatchAzureNodeModuleGroup, method getModuleGroups.
/**
 * Builds the Spark Batch Azure module group for the EMR 5.29.0 distribution.
 *
 * @param distribution the distribution key
 * @param version the version key
 * @return a single-element set containing the Azure module group, guarded by the
 *         Spark Batch Azure configuration linked-parameter condition
 */
public static Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) {
    DistributionModuleGroup azureGroup = new DistributionModuleGroup(
            EMR5290Constant.SPARK_AZURE_MRREQUIRED_MODULE_GROUP.getModuleName(),
            true, // mrRequired
            new SparkBatchLinkedNodeCondition(distribution, version,
                    SparkBatchConstant.SPARK_BATCH_AZURE_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition());
    Set<DistributionModuleGroup> result = new HashSet<>();
    result.add(azureGroup);
    return result;
}
Usage of org.talend.hadoop.distribution.condition.common.SparkBatchLinkedNodeCondition in the tbd-studio-se project by Talend: class ModuleGroupsUtils, method getModuleGroups.
/**
 * Creates the set of {@link DistributionModuleGroup} guarded by a
 * {@link SparkBatchLinkedNodeCondition}, optionally AND-combined with an extra
 * caller-supplied condition.
 *
 * @param distribution the distribution key
 * @param version the version key
 * @param condition an optional extra condition; may be {@code null}
 * @param moduleName the module name
 * @param mrRequired whether the module group is mrRequired
 * @return a single-element set containing the resulting {@link DistributionModuleGroup}
 */
public static Set<DistributionModuleGroup> getModuleGroups(String distribution, String version, ComponentCondition condition, String moduleName, boolean mrRequired) {
    ComponentCondition linkedCondition = new SparkBatchLinkedNodeCondition(distribution, version,
            SparkBatchConstant.SPARK_BATCH_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition();
    // No extra condition: use the linked-node condition alone; otherwise AND them.
    ComponentCondition effectiveCondition = (condition == null)
            ? linkedCondition
            : new MultiComponentCondition(condition, BooleanOperator.AND, linkedCondition);
    Set<DistributionModuleGroup> result = new HashSet<>();
    result.add(new DistributionModuleGroup(moduleName, mrRequired, effectiveCondition));
    return result;
}
Usage of org.talend.hadoop.distribution.condition.common.SparkBatchLinkedNodeCondition in the tbd-studio-se project by Talend: class DynamicSparkBatchAzureNodeModuleGroup, method getModuleGroups.
@Override
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) throws Exception {
    // Resolve the runtime module-group id for the Azure template via the plugin adapter.
    DynamicPluginAdapter adapter = getPluginAdapter();
    String runtimeId = adapter.getRuntimeModuleGroupIdByTemplateId(
            DynamicModuleGroupConstant.SPARK_AZURE_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(runtimeId);
    Set<DistributionModuleGroup> result = new HashSet<>();
    if (StringUtils.isNotBlank(runtimeId)) {
        // Guard the group with the Spark Batch Azure configuration linked-parameter condition.
        result.add(new DistributionModuleGroup(runtimeId, true,
                new SparkBatchLinkedNodeCondition(distribution, version,
                        SparkBatchConstant.SPARK_BATCH_AZURE_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition()));
    }
    return result;
}
Usage of org.talend.hadoop.distribution.condition.common.SparkBatchLinkedNodeCondition in the tbd-studio-se project by Talend: class DynamicSparkBatchParquetNodeModuleGroup, method getModuleGroups.
@Override
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) throws Exception {
    Set<DistributionModuleGroup> result = new HashSet<>();
    // Look up the runtime module-group id for the Parquet template.
    String parquetRuntimeId = getPluginAdapter().getRuntimeModuleGroupIdByTemplateId(
            DynamicModuleGroupConstant.SPARK_PARQUET_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(parquetRuntimeId);
    if (StringUtils.isNotBlank(parquetRuntimeId)) {
        // Parquet group only needs the distribution/version linked-node condition.
        DistributionModuleGroup parquetGroup = new DistributionModuleGroup(parquetRuntimeId, true,
                new SparkBatchLinkedNodeCondition(distribution, version).getCondition());
        result.add(parquetGroup);
    }
    return result;
}
Aggregations