Use of org.talend.hadoop.distribution.DistributionModuleGroup in project tbd-studio-se by Talend.
The class DynamicSparkBatchS3NodeModuleGroup, method getModuleGroups:
@Override
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) throws Exception {
    Set<DistributionModuleGroup> hs = new HashSet<>();
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    // Resolve the runtime module group id registered for the S3 MR-required template.
    String sparkS3MrRequiredRuntimeId = pluginAdapter
            .getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK_S3_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(sparkS3MrRequiredRuntimeId);
    if (StringUtils.isNotBlank(sparkS3MrRequiredRuntimeId)) {
        // The group is only required when the linked Spark configuration matches this distribution and version.
        DistributionModuleGroup dmg = new DistributionModuleGroup(sparkS3MrRequiredRuntimeId, true,
                new SparkBatchLinkedNodeCondition(distribution, version,
                        SparkBatchConstant.SPARK_BATCH_S3_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition());
        hs.add(dmg);
    }
    return hs;
}
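As a minimal consumption sketch (the receiver variable and the distribution/version literals are placeholders, not values taken from this project), the returned set can be inspected with the getters used elsewhere on this page:

// Hypothetical caller; nodeModuleGroup would be an instance of DynamicSparkBatchS3NodeModuleGroup.
Set<DistributionModuleGroup> groups = nodeModuleGroup.getModuleGroups("SOME_DISTRIBUTION", "SOME_VERSION");
for (DistributionModuleGroup dmg : groups) {
    // Each entry carries a module name and an optional "required if" condition.
    System.out.println(dmg.getModuleName() + " -> "
            + (dmg.getRequiredIf() == null ? "always required" : dmg.getRequiredIf().getConditionString()));
}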
Use of org.talend.hadoop.distribution.DistributionModuleGroup in project tbd-studio-se by Talend.
The class DynamicCDHGraphFramesNodeModuleGroup, method getModuleGroups:
@Override
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) throws Exception {
    Set<DistributionModuleGroup> hs = new HashSet<>();
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    String graphFramesMrRequiredRuntimeId = pluginAdapter
            .getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.GRAPHFRAMES_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(graphFramesMrRequiredRuntimeId);
    if (StringUtils.isNotBlank(graphFramesMrRequiredRuntimeId)) {
        DistributionModuleGroup dmg = new DistributionModuleGroup(graphFramesMrRequiredRuntimeId, true,
                new SparkBatchLinkedNodeCondition(distribution, version,
                        SparkBatchConstant.SPARK_BATCH_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition());
        hs.add(dmg);
    }
    return hs;
}
Use of org.talend.hadoop.distribution.DistributionModuleGroup in project tbd-studio-se by Talend.
The class ModuleGroupsUtils, method getStreamingModuleGroups:
/**
 * Utility method to create the collection of {@link DistributionModuleGroup} with a condition made of a
 * {@link SparkStreamingLinkedNodeCondition} and an optional additional raw condition.
 *
 * @param distribution the distribution key
 * @param version the version key
 * @param condition a nullable additional condition
 * @param moduleName the module name
 * @param mrRequired whether the module group is MR-required
 * @return a set of {@link DistributionModuleGroup}
 */
public static Set<DistributionModuleGroup> getStreamingModuleGroups(String distribution, String version,
        ComponentCondition condition, String moduleName, boolean mrRequired) {
    Set<DistributionModuleGroup> hs = new HashSet<>();
    ComponentCondition distribCondition = new SparkStreamingLinkedNodeCondition(distribution, version).getCondition();
    ComponentCondition cc;
    if (condition != null) {
        // Combine the caller-provided condition with the distribution/version condition.
        cc = new MultiComponentCondition(condition, BooleanOperator.AND, distribCondition);
    } else {
        cc = distribCondition;
    }
    DistributionModuleGroup dmg = new DistributionModuleGroup(moduleName, mrRequired, cc);
    hs.add(dmg);
    return hs;
}
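A hedged usage sketch of this utility (the module group name, the null extra condition, and the wrapping method are illustrative assumptions, not code from the project):

// Hypothetical Spark Streaming node module group delegating to the utility above.
public static Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) {
    // null extra condition: only the SparkStreamingLinkedNodeCondition gates the group;
    // true: the module group is flagged as MR-required.
    return ModuleGroupsUtils.getStreamingModuleGroups(distribution, version, null,
            "EXAMPLE_STREAMING_MODULE_GROUP", true);
}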
Use of org.talend.hadoop.distribution.DistributionModuleGroup in project tbd-studio-se by Talend.
The class Spark30xDistribution, method buildNodeModuleGroups:
protected Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> buildNodeModuleGroups(String distribution, String version) {
    Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> result = super.buildNodeModuleGroups(distribution, version);
    Set<DistributionModuleGroup> s3ModuleGroup = Spark30xNodeModuleGroup.getModuleGroup(
            ModuleGroupName.S3.get(getVersion()),
            SparkBatchConstant.SPARK_BATCH_S3_SPARKCONFIGURATION_LINKEDPARAMETER, Spark30xDistribution.SPARK_VERSION);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.S3_CONFIGURATION_COMPONENT), s3ModuleGroup);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkBatchConstant.S3_CONFIGURATION_COMPONENT), s3ModuleGroup);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.MATCH_PREDICT_COMPONENT),
            Spark30xNodeModuleGroup.getModuleGroup(ModuleGroupName.PARQUET.get(getVersion()),
                    SparkBatchConstant.SPARK_BATCH_SPARKCONFIGURATION_LINKEDPARAMETER, Spark30xDistribution.SPARK_VERSION));
    // Spark model encoder
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.TMODEL_ENCODER_COMPONENT),
            Spark30xNodeModuleGroup.getModuleGroup(ModuleGroupName.MODEL_ENCODER.get(getVersion()),
                    SparkBatchConstant.SPARK_BATCH_SPARKCONFIGURATION_LINKEDPARAMETER, Spark30xDistribution.SPARK_VERSION));
    // Spark Streaming model encoder
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.TMODEL_ENCODER_COMPONENT),
            Spark30xNodeModuleGroup.getModuleGroup(ModuleGroupName.MODEL_ENCODER.get(getVersion()),
                    SparkStreamingConstant.SPARK_STREAMING_SPARKCONFIGURATION_LINKEDPARAMETER, Spark30xDistribution.SPARK_VERSION));
    return result;
}
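As an illustrative lookup against the map built above (not code from the project, and it assumes NodeComponentTypeBean defines equals/hashCode consistently with its use as a map key), the entry registered for the Spark Batch S3 configuration component can be retrieved with the same key shape used in the puts:

Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> groups = buildNodeModuleGroups(distribution, version);
// Key mirrors the NodeComponentTypeBean registered above (component type + component constant).
Set<DistributionModuleGroup> s3Groups = groups.get(
        new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.S3_CONFIGURATION_COMPONENT));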
Use of org.talend.hadoop.distribution.DistributionModuleGroup in project tbd-studio-se by Talend.
The class DBR550SparkBatchAzureNodeModuleGroupTest, method testModuleGroups:
@Test
public void testModuleGroups() throws Exception {
    Map<String, String> results = new HashMap<>();
    results.put(DBR550Constant.SPARK_AZURE_MRREQUIRED_MODULE_GROUP.getModuleName(),
            "((#LINK@NODE.STORAGE_CONFIGURATION.DISTRIBUTION == 'DATABRICKS') AND (#LINK@NODE.STORAGE_CONFIGURATION.SPARK_VERSION == 'Databricks_5_5'))"); //$NON-NLS-1$
    Set<DistributionModuleGroup> moduleGroups = DBR550SparkBatchAzureNodeModuleGroup
            .getModuleGroups(DBR550Distribution.DISTRIBUTION_NAME, DBR550Distribution.VERSION);
    assertEquals(results.size(), moduleGroups.size());
    for (DistributionModuleGroup module : moduleGroups) {
        assertTrue("Should contain module " + module.getModuleName(), //$NON-NLS-1$
                results.containsKey(module.getModuleName()));
        if (results.get(module.getModuleName()) == null) {
            // No condition expected for this module group.
            assertTrue("The condition of the module " + module.getModuleName() + " is not null.", //$NON-NLS-1$ //$NON-NLS-2$
                    module.getRequiredIf() == null);
        } else {
            // A condition is expected: check it exists and matches the expected string.
            assertTrue("The condition of the module " + module.getModuleName() + " is null, but it should be " //$NON-NLS-1$ //$NON-NLS-2$
                    + results.get(module.getModuleName()) + ".", //$NON-NLS-1$
                    module.getRequiredIf() != null);
            assertEquals(results.get(module.getModuleName()), module.getRequiredIf().getConditionString());
        }
    }
}