Use of org.talend.hadoop.distribution.condition.RawExpression in the project tbd-studio-se by Talend:
the class DynamicHDPGraphFramesNodeModuleGroup, method getModuleGroups.
/**
 * Builds the set of distribution module groups required by the GraphFrames node,
 * resolving one runtime module group per supported Spark major version.
 *
 * @param distribution the distribution identifier
 * @param version the distribution version
 * @param condition optional extra raw condition; when non-null it is AND-ed onto
 *        both Spark-version conditions (may be null)
 * @return the resolved module groups (possibly empty, never null)
 * @throws Exception if a runtime module group id cannot be resolved
 */
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version, String condition) throws Exception {
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    Set<DistributionModuleGroup> result = new HashSet<>();

    // Base component conditions keyed on the two supported Spark versions.
    ComponentCondition conditionSpark1 = getComponentCondition(ESparkVersion.SPARK_1_6.getSparkVersion()); // $NON-NLS-1$
    ComponentCondition conditionSpark2 = getComponentCondition(ESparkVersion.SPARK_2_1.getSparkVersion()); // $NON-NLS-1$

    // When a caller-supplied raw condition is present, both base conditions
    // must also satisfy it.
    if (condition != null) {
        ComponentCondition extra = new SimpleComponentCondition(new RawExpression(condition));
        conditionSpark1 = new MultiComponentCondition(conditionSpark1, BooleanOperator.AND, extra);
        conditionSpark2 = new MultiComponentCondition(conditionSpark2, BooleanOperator.AND, extra);
    }

    // Resolve the runtime module-group ids from their template ids, then
    // validate them (checkRuntimeId semantics are defined elsewhere in the class).
    String spark1GroupId = pluginAdapter
            .getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.GRAPHFRAMES_MRREQUIRED_MODULE_GROUP.getModuleName());
    String spark2GroupId = pluginAdapter
            .getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.GRAPHFRAMES_SPARK2_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(spark1GroupId);
    checkRuntimeId(spark2GroupId);

    // Collect the module groups for every id that resolved to a usable value.
    if (StringUtils.isNotBlank(spark1GroupId)) {
        result.addAll(ModuleGroupsUtils.getModuleGroups(distribution, version, conditionSpark1, spark1GroupId, true));
    }
    if (StringUtils.isNotBlank(spark2GroupId)) {
        result.addAll(ModuleGroupsUtils.getModuleGroups(distribution, version, conditionSpark2, spark2GroupId, true));
    }
    return result;
}
Use of org.talend.hadoop.distribution.condition.RawExpression in the project tbd-studio-se by Talend:
the class DynamicSqoopModuleGroup, method getModuleGroups.
/**
 * Builds the set of distribution module groups for the Sqoop component family:
 * an unconditional base group, a Parquet group guarded by a file-format
 * condition, and a Hive group guarded by a raw additional-arguments condition.
 *
 * @return the resolved module groups (possibly empty, never null)
 * @throws Exception if a runtime module group id cannot be resolved
 */
@Override
public Set<DistributionModuleGroup> getModuleGroups() throws Exception {
    DynamicPluginAdapter adapter = getPluginAdapter();

    // Resolve the runtime ids for the three Sqoop-related module-group
    // templates, then validate each one.
    String baseGroupId = adapter
            .getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SQOOP_MODULE_GROUP.getModuleName());
    String parquetGroupId = adapter
            .getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SQOOP_PARQUET_MODULE_GROUP.getModuleName());
    String hiveGroupId = adapter
            .getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SQOOP_HIVE_MODULE_GROUP.getModuleName());
    checkRuntimeId(baseGroupId);
    checkRuntimeId(parquetGroupId);
    checkRuntimeId(hiveGroupId);

    Set<DistributionModuleGroup> groups = new HashSet<>();
    if (StringUtils.isNotBlank(baseGroupId)) {
        // Base Sqoop group: no component condition attached.
        groups.add(new DistributionModuleGroup(baseGroupId));
    }
    if (StringUtils.isNotBlank(parquetGroupId)) {
        // Guarded by FILE_FORMAT == Parquet output format
        // (note: "PAQUET" spelling comes from the project constant itself).
        ComponentCondition parquetCondition = new SimpleComponentCondition(
                new BasicExpression(SqoopConstant.FILE_FORMAT, EqualityOperator.EQ, SqoopConstant.PAQUET_OUTPUT_FORMAT));
        groups.add(new DistributionModuleGroup(parquetGroupId, true, parquetCondition));
    }
    if (StringUtils.isNotBlank(hiveGroupId)) {
        // Guarded by a raw expression over the component's additional
        // arguments table (hive.import=true).
        ComponentCondition hiveCondition = new SimpleComponentCondition(
                new RawExpression("ADDITIONAL_JAVA CONTAINS {ADDITIONAL_ARGUMENT=\"hive.import\", ADDITIONAL_VALUE=\"true\"}"));
        groups.add(new DistributionModuleGroup(hiveGroupId, true, hiveCondition));
    }
    return groups;
}
Aggregations