
Example 1 with BasicExpression

Use of org.talend.hadoop.distribution.condition.BasicExpression in project tbd-studio-se by Talend.

From the class DynamicHDPMapReduceModuleGroup, method getModuleGroups:

@Override
public Set<DistributionModuleGroup> getModuleGroups() throws Exception {
    Set<DistributionModuleGroup> moduleGroups = new HashSet<>();
    Set<DistributionModuleGroup> moduleGroupsFromSuper = super.getModuleGroups();
    if (moduleGroupsFromSuper != null && !moduleGroupsFromSuper.isEmpty()) {
        moduleGroups.addAll(moduleGroupsFromSuper);
    }
    ComponentCondition useAtlas = new SimpleComponentCondition(new BasicExpression(MRConstant.USE_ATLAS));
    String atlasSpark1RuntimeId = getPluginAdapter().getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.ATLAS_SPARK_1_MODULE_GROUP.getModuleName());
    checkRuntimeId(atlasSpark1RuntimeId);
    if (StringUtils.isNotBlank(atlasSpark1RuntimeId)) {
        moduleGroups.add(new DistributionModuleGroup(atlasSpark1RuntimeId, false, useAtlas));
    }
    return moduleGroups;
}
Also used: BasicExpression (org.talend.hadoop.distribution.condition.BasicExpression), SimpleComponentCondition (org.talend.hadoop.distribution.condition.SimpleComponentCondition), ComponentCondition (org.talend.hadoop.distribution.condition.ComponentCondition), DistributionModuleGroup (org.talend.hadoop.distribution.DistributionModuleGroup), HashSet (java.util.HashSet)
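
The resolve-check-register pattern above (look up the runtime id, call checkRuntimeId, then add the group only when the id is not blank) recurs in every dynamic module group on this page. The sketch below factors it into a helper. The helper is hypothetical and not part of tbd-studio-se, its name is invented, StringUtils is assumed to be the Apache Commons Lang class, and only the DistributionModuleGroup and ComponentCondition usages are taken from the example above.

// Hypothetical helper, not part of the project: register a module group only when its
// runtime id resolved to a usable value. Imports are omitted, as in the examples on this page.
private void addIfResolved(Set<DistributionModuleGroup> moduleGroups, String runtimeId, boolean mrRequired,
        ComponentCondition condition) {
    // Same guard as StringUtils.isNotBlank(atlasSpark1RuntimeId) in the method above.
    if (StringUtils.isNotBlank(runtimeId)) {
        moduleGroups.add(new DistributionModuleGroup(runtimeId, mrRequired, condition));
    }
}

With such a helper, the Atlas block above would reduce to a single addIfResolved(moduleGroups, atlasSpark1RuntimeId, false, useAtlas) call.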

Example 2 with BasicExpression

Use of org.talend.hadoop.distribution.condition.BasicExpression in project tbd-studio-se by Talend.

From the class DynamicHDPSparkBatchModuleGroup, method getModuleGroups:

@Override
public Set<DistributionModuleGroup> getModuleGroups() throws Exception {
    Set<DistributionModuleGroup> moduleGroups = new HashSet<>();
    Set<DistributionModuleGroup> moduleGroupsFromSuper = super.getModuleGroups();
    if (moduleGroupsFromSuper != null && !moduleGroupsFromSuper.isEmpty()) {
        moduleGroups.addAll(moduleGroupsFromSuper);
    }
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    String spark2RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK2_MODULE_GROUP.getModuleName());
    String sparkMRRequiredRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK_MRREQUIRED_MODULE_GROUP.getModuleName());
    String hdfsRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HDFS_MODULE_GROUP.getModuleName());
    String hdfsNotSpark16RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HDFS_NOT_SPARK_1_6_MODULE_GROUP.getModuleName());
    String tezNotSpark16RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.TEZ_NOT_SPARK_1_6_MODULE_GROUP.getModuleName());
    String mapReduceRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.MAPREDUCE_MODULE_GROUP.getModuleName());
    String atlasSpark1RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.ATLAS_SPARK_1_MODULE_GROUP.getModuleName());
    String atlasSpark2RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.ATLAS_SPARK_2_MODULE_GROUP.getModuleName());
    String sqoopRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SQOOP_MODULE_GROUP.getModuleName());
    String sqoopParquetRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SQOOP_PARQUET_MODULE_GROUP.getModuleName());
    String hBaseRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HBASE_MODULE_GROUP.getModuleName());
    String sparkS3RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK_S3_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(spark2RuntimeId);
    checkRuntimeId(sparkMRRequiredRuntimeId);
    checkRuntimeId(hdfsRuntimeId);
    checkRuntimeId(hdfsNotSpark16RuntimeId);
    checkRuntimeId(tezNotSpark16RuntimeId);
    checkRuntimeId(mapReduceRuntimeId);
    checkRuntimeId(atlasSpark1RuntimeId);
    checkRuntimeId(atlasSpark2RuntimeId);
    checkRuntimeId(sqoopRuntimeId);
    checkRuntimeId(sqoopParquetRuntimeId);
    checkRuntimeId(hBaseRuntimeId);
    checkRuntimeId(sparkS3RuntimeId);
    ComponentCondition useAtlas = new SimpleComponentCondition(new BasicExpression(MRConstant.USE_ATLAS));
    ComponentCondition atlasSpark1x = new MultiComponentCondition(useAtlas, BooleanOperator.AND, conditionSpark1);
    ComponentCondition atlasSpark2x = new MultiComponentCondition(useAtlas, BooleanOperator.AND, conditionSpark2);
    if (StringUtils.isNotBlank(sparkMRRequiredRuntimeId)) {
        moduleGroups.add(new DistributionModuleGroup(sparkMRRequiredRuntimeId, true, conditionSpark1));
        moduleGroups.add(new DistributionModuleGroup(sparkMRRequiredRuntimeId, true, conditionSpark2));
    }
    if (StringUtils.isNotBlank(hdfsRuntimeId)) {
        moduleGroups.add(new DistributionModuleGroup(hdfsRuntimeId, false, conditionSpark1));
        moduleGroups.add(new DistributionModuleGroup(hdfsRuntimeId, false, conditionSpark2));
    }
    if (StringUtils.isNotBlank(hdfsNotSpark16RuntimeId)) {
        moduleGroups.add(new DistributionModuleGroup(hdfsNotSpark16RuntimeId, false, conditionNotSpark16));
    }
    if (StringUtils.isNotBlank(tezNotSpark16RuntimeId)) {
        moduleGroups.add(new DistributionModuleGroup(tezNotSpark16RuntimeId, false, conditionNotSpark16));
    }
    if (StringUtils.isNotBlank(mapReduceRuntimeId)) {
        moduleGroups.add(new DistributionModuleGroup(mapReduceRuntimeId, false, conditionSpark1));
        moduleGroups.add(new DistributionModuleGroup(mapReduceRuntimeId, false, conditionSpark2));
    }
    if (StringUtils.isNotBlank(atlasSpark1RuntimeId)) {
        moduleGroups.add(new DistributionModuleGroup(atlasSpark1RuntimeId, true, atlasSpark1x));
    }
    if (StringUtils.isNotBlank(atlasSpark2RuntimeId)) {
        moduleGroups.add(new DistributionModuleGroup(atlasSpark2RuntimeId, true, atlasSpark2x));
    }
    if (StringUtils.isNotBlank(sqoopRuntimeId)) {
        moduleGroups.add(new DistributionModuleGroup(sqoopRuntimeId, false, conditionSpark1));
        moduleGroups.add(new DistributionModuleGroup(sqoopRuntimeId, false, conditionSpark2));
    }
    if (StringUtils.isNotBlank(sqoopParquetRuntimeId)) {
        moduleGroups.add(new DistributionModuleGroup(sqoopParquetRuntimeId, false, conditionSpark1));
        moduleGroups.add(new DistributionModuleGroup(sqoopParquetRuntimeId, false, conditionSpark2));
    }
    if (StringUtils.isNotBlank(hBaseRuntimeId)) {
        moduleGroups.add(new DistributionModuleGroup(hBaseRuntimeId, true, conditionSpark1));
        moduleGroups.add(new DistributionModuleGroup(hBaseRuntimeId, true, conditionSpark2));
    }
    if (StringUtils.isNotBlank(sparkS3RuntimeId)) {
        moduleGroups.add(new DistributionModuleGroup(sparkS3RuntimeId, true));
    }
    return moduleGroups;
}
Also used: DynamicPluginAdapter (org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter), BasicExpression (org.talend.hadoop.distribution.condition.BasicExpression), SimpleComponentCondition (org.talend.hadoop.distribution.condition.SimpleComponentCondition), MultiComponentCondition (org.talend.hadoop.distribution.condition.MultiComponentCondition), ComponentCondition (org.talend.hadoop.distribution.condition.ComponentCondition), DistributionModuleGroup (org.talend.hadoop.distribution.DistributionModuleGroup), HashSet (java.util.HashSet)
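
Most groups in this method are registered twice, once for conditionSpark1 and once for conditionSpark2. Below is a minimal sketch of a hypothetical helper that collapses those pairs; the method name is invented, and the constructors it calls are only the ones exercised in the example above.

// Hypothetical helper, not part of the project: add the same runtime module group under
// both Spark version conditions when the id resolved to something non-blank.
private void addForBothSparkConditions(Set<DistributionModuleGroup> moduleGroups, String runtimeId,
        boolean mrRequired, ComponentCondition conditionSpark1, ComponentCondition conditionSpark2) {
    if (StringUtils.isNotBlank(runtimeId)) {
        moduleGroups.add(new DistributionModuleGroup(runtimeId, mrRequired, conditionSpark1));
        moduleGroups.add(new DistributionModuleGroup(runtimeId, mrRequired, conditionSpark2));
    }
}

For example, the hdfsRuntimeId block above would become a single addForBothSparkConditions(moduleGroups, hdfsRuntimeId, false, conditionSpark1, conditionSpark2) call.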

Example 3 with BasicExpression

Use of org.talend.hadoop.distribution.condition.BasicExpression in project tbd-studio-se by Talend.

From the class DynamicCDHSparkBatchModuleGroup, method getModuleGroups:

@Override
public Set<DistributionModuleGroup> getModuleGroups() throws Exception {
    Set<DistributionModuleGroup> moduleGroups = new HashSet<>();
    Set<DistributionModuleGroup> moduleGroupsFromSuper = super.getModuleGroups();
    if (moduleGroupsFromSuper != null && !moduleGroupsFromSuper.isEmpty()) {
        moduleGroups.addAll(moduleGroupsFromSuper);
    }
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    String sparkMrRequiredRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK_MRREQUIRED_MODULE_GROUP.getModuleName());
    String hdfsSpark1_6RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HDFS_MODULE_GROUP_SPARK1_6.getModuleName());
    String hdfsSpark2_1RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HDFS_MODULE_GROUP_SPARK2_1.getModuleName());
    String hdfsCommonRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HDFS_MODULE_GROUP_COMMON.getModuleName());
    String mrRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.MAPREDUCE_MODULE_GROUP.getModuleName());
    String talendClouderaNaviRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicCDHModuleGroupConstant.TALEND_CLOUDERA_CDH_NAVIGATOR.getModuleName());
    checkRuntimeId(sparkMrRequiredRuntimeId);
    checkRuntimeId(hdfsSpark1_6RuntimeId);
    checkRuntimeId(hdfsSpark2_1RuntimeId);
    checkRuntimeId(hdfsCommonRuntimeId);
    checkRuntimeId(mrRuntimeId);
    checkRuntimeId(talendClouderaNaviRuntimeId);
    if (StringUtils.isNotBlank(sparkMrRequiredRuntimeId)) {
        moduleGroups.add(new DistributionModuleGroup(sparkMrRequiredRuntimeId, true, conditionSpark1));
        moduleGroups.add(new DistributionModuleGroup(sparkMrRequiredRuntimeId, true, conditionSpark2));
    }
    if (StringUtils.isNotBlank(hdfsSpark1_6RuntimeId)) {
        moduleGroups.add(new DistributionModuleGroup(hdfsSpark1_6RuntimeId, false, conditionSpark1));
    }
    if (StringUtils.isNotBlank(hdfsSpark2_1RuntimeId)) {
        moduleGroups.add(new DistributionModuleGroup(hdfsSpark2_1RuntimeId, false, conditionSpark2));
    }
    if (StringUtils.isNotBlank(hdfsCommonRuntimeId)) {
        moduleGroups.add(new DistributionModuleGroup(hdfsCommonRuntimeId, false, conditionSpark1));
        moduleGroups.add(new DistributionModuleGroup(hdfsCommonRuntimeId, false, conditionSpark2));
    }
    if (StringUtils.isNotBlank(mrRuntimeId)) {
        moduleGroups.add(new DistributionModuleGroup(mrRuntimeId, false, conditionSpark1));
        moduleGroups.add(new DistributionModuleGroup(mrRuntimeId, false, conditionSpark2));
    }
    if (StringUtils.isNotBlank(talendClouderaNaviRuntimeId)) {
        ComponentCondition conditionUseNavigator = new SimpleComponentCondition(new BasicExpression(SparkBatchConstant.USE_CLOUDERA_NAVIGATOR));
        moduleGroups.add(new DistributionModuleGroup(talendClouderaNaviRuntimeId, true, conditionUseNavigator));
    }
    return moduleGroups;
}
Also used: DynamicPluginAdapter (org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter), BasicExpression (org.talend.hadoop.distribution.condition.BasicExpression), SimpleComponentCondition (org.talend.hadoop.distribution.condition.SimpleComponentCondition), MultiComponentCondition (org.talend.hadoop.distribution.condition.MultiComponentCondition), ComponentCondition (org.talend.hadoop.distribution.condition.ComponentCondition), DistributionModuleGroup (org.talend.hadoop.distribution.DistributionModuleGroup), HashSet (java.util.HashSet)
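
Here the Cloudera Navigator group is gated only on USE_CLOUDERA_NAVIGATOR. If it ever needed to be limited to a specific Spark line as well, the same AND composition used for Atlas in Example 2 would apply. The fragment below is a hypothetical variation, not what the CDH group currently does, and conditionSpark2 stands for whatever Spark 2.x condition the surrounding class defines.

// Hypothetical variation (not current behaviour): require both the Navigator checkbox
// and a Spark 2.x version, mirroring the atlasSpark2x composition from Example 2.
ComponentCondition useNavigator = new SimpleComponentCondition(
        new BasicExpression(SparkBatchConstant.USE_CLOUDERA_NAVIGATOR));
ComponentCondition navigatorOnSpark2 = new MultiComponentCondition(useNavigator, BooleanOperator.AND, conditionSpark2);
if (StringUtils.isNotBlank(talendClouderaNaviRuntimeId)) {
    moduleGroups.add(new DistributionModuleGroup(talendClouderaNaviRuntimeId, true, navigatorOnSpark2));
}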

Example 4 with BasicExpression

Use of org.talend.hadoop.distribution.condition.BasicExpression in project tbd-studio-se by Talend.

From the class DistributionVersion, method getDisplayShowIf:

public String getDisplayShowIf() {
    final Expression trueExp = new BooleanExpression(true);
    final Expression falseExp = new BooleanExpression(false);
    ComponentCondition additionalCondition = displayCondition;
    if (additionalCondition != null && (trueExp.getExpressionString().equals(additionalCondition.getConditionString()) || falseExp.getExpressionString().equals(additionalCondition.getConditionString()))) {
        // Don't show a version if its display condition is a plain BooleanExpression.
        return trueExp.getExpressionString().equals(additionalCondition.getConditionString()) ? Boolean.TRUE.toString() : Boolean.FALSE.toString();
    } else {
        // Compose the ComponentCondition to display a version.
        ComponentCondition condition;
        org.talend.hadoop.distribution.condition.Expression e = new BasicExpression(distribution.componentType.getDistributionParameter(), EqualityOperator.EQ, distribution.name);
        if (additionalCondition != null) {
            condition = new MultiComponentCondition(new SimpleComponentCondition(e), BooleanOperator.AND, new NestedComponentCondition(additionalCondition));
        } else {
            condition = new SimpleComponentCondition(e);
        }
        return condition.getConditionString();
    }
}
Also used: BooleanExpression (org.talend.hadoop.distribution.condition.BooleanExpression), BasicExpression (org.talend.hadoop.distribution.condition.BasicExpression), Expression (org.talend.hadoop.distribution.condition.Expression), SimpleComponentCondition (org.talend.hadoop.distribution.condition.SimpleComponentCondition), MultiComponentCondition (org.talend.hadoop.distribution.condition.MultiComponentCondition), NestedComponentCondition (org.talend.hadoop.distribution.condition.NestedComponentCondition), ComponentCondition (org.talend.hadoop.distribution.condition.ComponentCondition)
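
The core of getDisplayShowIf is the composition step: an equality expression on the distribution parameter, wrapped in a SimpleComponentCondition, AND-ed with the existing display condition wrapped in a NestedComponentCondition. The standalone fragment below replays just that step with made-up values; "DISTRIBUTION", "HDP" and "USE_FEATURE_X" are placeholders rather than names taken from the project, and the classes and calls are the ones this example already imports and uses.

// Hypothetical, standalone illustration of the composition above. Parameter names and
// values are placeholders; getConditionString() is the same call the method returns.
Expression distributionEquals = new BasicExpression("DISTRIBUTION", EqualityOperator.EQ, "HDP");
ComponentCondition additional = new SimpleComponentCondition(new BasicExpression("USE_FEATURE_X"));
ComponentCondition composed = new MultiComponentCondition(
        new SimpleComponentCondition(distributionEquals), BooleanOperator.AND,
        new NestedComponentCondition(additional));
String showIf = composed.getConditionString();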

Example 5 with BasicExpression

Use of org.talend.hadoop.distribution.condition.BasicExpression in project tbd-studio-se by Talend.

From the class DynamicCDHSparkStreamingModuleGroup, method init:

@Override
protected void init() {
    // $NON-NLS-1$
    spark1Condition = new SimpleComponentCondition(
            new BasicExpression("SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, ESparkVersion.SPARK_1_6.getSparkVersion()));
    // $NON-NLS-1$
    spark2Condition = new MultiComponentCondition(
            new BasicExpression("SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, ESparkVersion.SPARK_2_2.getSparkVersion()),
            BooleanOperator.OR,
            new BasicExpression("SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, ESparkVersion.SPARK_2_4_X.getSparkVersion()));
}
Also used: BasicExpression (org.talend.hadoop.distribution.condition.BasicExpression), SimpleComponentCondition (org.talend.hadoop.distribution.condition.SimpleComponentCondition), MultiComponentCondition (org.talend.hadoop.distribution.condition.MultiComponentCondition)
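
init() hard-codes one Spark 1.6 condition and an OR of two Spark 2.x conditions. Below is a hedged sketch of a more general builder that ORs together any list of supported versions; the method is hypothetical, its name is invented, and it reuses only the BasicExpression, SimpleComponentCondition, MultiComponentCondition and ESparkVersion calls that appear in init() above.

// Hypothetical generalization, not part of the project: build a condition that matches
// any of the given Spark versions by chaining MultiComponentCondition with OR.
private ComponentCondition buildSparkVersionCondition(List<ESparkVersion> versions) {
    ComponentCondition condition = null;
    for (ESparkVersion version : versions) {
        ComponentCondition next = new SimpleComponentCondition(
                new BasicExpression("SUPPORTED_SPARK_VERSION", EqualityOperator.EQ, version.getSparkVersion())); // $NON-NLS-1$
        condition = (condition == null) ? next : new MultiComponentCondition(condition, BooleanOperator.OR, next);
    }
    return condition;
}

With it, spark2Condition above is simply buildSparkVersionCondition(Arrays.asList(ESparkVersion.SPARK_2_2, ESparkVersion.SPARK_2_4_X)).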

Aggregations

BasicExpression (org.talend.hadoop.distribution.condition.BasicExpression): 23 usages
SimpleComponentCondition (org.talend.hadoop.distribution.condition.SimpleComponentCondition): 20 usages
MultiComponentCondition (org.talend.hadoop.distribution.condition.MultiComponentCondition): 16 usages
ComponentCondition (org.talend.hadoop.distribution.condition.ComponentCondition): 15 usages
HashSet (java.util.HashSet): 13 usages
DistributionModuleGroup (org.talend.hadoop.distribution.DistributionModuleGroup): 11 usages
DynamicPluginAdapter (org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter): 5 usages
Expression (org.talend.hadoop.distribution.condition.Expression): 4 usages
NestedComponentCondition (org.talend.hadoop.distribution.condition.NestedComponentCondition): 4 usages
ShowExpression (org.talend.hadoop.distribution.condition.ShowExpression): 4 usages
Set (java.util.Set): 3 usages
HashMap (java.util.HashMap): 2 usages
Test (org.junit.Test): 2 usages
LinkedNodeExpression (org.talend.hadoop.distribution.condition.LinkedNodeExpression): 2 usages
RawExpression (org.talend.hadoop.distribution.condition.RawExpression): 2 usages
SparkBatchLinkedNodeCondition (org.talend.hadoop.distribution.condition.common.SparkBatchLinkedNodeCondition): 2 usages
SparkStreamingLinkedNodeCondition (org.talend.hadoop.distribution.condition.common.SparkStreamingLinkedNodeCondition): 2 usages
LinkedHashSet (java.util.LinkedHashSet): 1 usage
Map (java.util.Map): 1 usage
ComponentType (org.talend.hadoop.distribution.ComponentType): 1 usage