Example 11 with MultiComponentCondition

use of org.talend.hadoop.distribution.condition.MultiComponentCondition in project tbd-studio-se by Talend.

the class ModuleGroupsUtils method getModuleGroups.

/**
 * Utility method to create the collection of {@link DistributionModuleGroup} with a condition made of a
 * {@link SparkBatchLinkedNodeCondition} and an additional raw condition
 *
 * @param distribution the distribution key
 * @param version the version key
 * @param condition a nullable additional condition
 * @param moduleName the module name
 * @param mrRequired if the module group is mrRequired
 * @return a set of {@link DistributionModuleGroup}
 */
public static Set<DistributionModuleGroup> getModuleGroups(String distribution, String version, ComponentCondition condition, String moduleName, boolean mrRequired) {
    Set<DistributionModuleGroup> hs = new HashSet<>();
    ComponentCondition distribCondition = new SparkBatchLinkedNodeCondition(distribution, version, SparkBatchConstant.SPARK_BATCH_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition();
    ComponentCondition cc = null;
    if (condition != null) {
        cc = new MultiComponentCondition(condition, BooleanOperator.AND, distribCondition);
    } else {
        cc = distribCondition;
    }
    DistributionModuleGroup dmg = new DistributionModuleGroup(moduleName, mrRequired, cc);
    hs.add(dmg);
    return hs;
}
Also used : SparkBatchLinkedNodeCondition(org.talend.hadoop.distribution.condition.common.SparkBatchLinkedNodeCondition) MultiComponentCondition(org.talend.hadoop.distribution.condition.MultiComponentCondition) SimpleComponentCondition(org.talend.hadoop.distribution.condition.SimpleComponentCondition) ComponentCondition(org.talend.hadoop.distribution.condition.ComponentCondition) DistributionModuleGroup(org.talend.hadoop.distribution.DistributionModuleGroup) HashSet(java.util.HashSet)
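
A minimal usage sketch of this utility, assuming a hypothetical extra condition; the parameter name "USE_KRB", the literal values, and the module group name below are illustrative placeholders, not values from the source:

// Hypothetical extra condition: only pull the module group when the (assumed) USE_KRB check box is unchecked.
ComponentCondition extra = new SimpleComponentCondition(
        new BasicExpression("USE_KRB", EqualityOperator.EQ, "false"));
// The utility ANDs the extra condition with the SparkBatchLinkedNodeCondition built for the given distribution/version.
Set<DistributionModuleGroup> groups = ModuleGroupsUtils.getModuleGroups(
        "MY_DISTRIBUTION", "MY_VERSION", extra, "my-spark-batch-module-group", true);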

Example 12 with MultiComponentCondition

use of org.talend.hadoop.distribution.condition.MultiComponentCondition in project tbd-studio-se by Talend.

the class AbstractDistribution method buildModuleGroups.

protected Map<ComponentType, Set<DistributionModuleGroup>> buildModuleGroups() {
    Map<ComponentType, Set<DistributionModuleGroup>> result = new HashMap<>();
    // HCatalog
    result.put(ComponentType.HCATALOG, ModuleGroupsUtils.getModuleGroups(null, ModuleGroupName.HDFS.get(this.getVersion()), false));
    // HDFS
    result.put(ComponentType.HDFS, ModuleGroupsUtils.getModuleGroups(null, ModuleGroupName.HDFS.get(this.getVersion()), false));
    // Hbase
    result.put(ComponentType.HBASE, ModuleGroupsUtils.getModuleGroups(null, ModuleGroupName.HBASE.get(this.getVersion()), true));
    // Hive
    ComponentCondition hiveOnHbaseCondition = new MultiComponentCondition(
            new SimpleComponentCondition(new BasicExpression(HiveConstant.HIVE_CONFIGURATION_COMPONENT_HBASEPARAMETER)),
            BooleanOperator.AND,
            new SimpleComponentCondition(new ShowExpression(HiveConstant.HIVE_CONFIGURATION_COMPONENT_HBASEPARAMETER)));
    Set<DistributionModuleGroup> hiveModuleGroups = new HashSet<>();
    hiveModuleGroups.addAll(ModuleGroupsUtils.getModuleGroups(null, ModuleGroupName.HIVE.get(this.getVersion()), false));
    hiveModuleGroups.addAll(ModuleGroupsUtils.getModuleGroups(hiveOnHbaseCondition, ModuleGroupName.HBASE.get(this.getVersion()), false));
    result.put(ComponentType.HIVE, hiveModuleGroups);
    // Hive on Spark
    result.put(ComponentType.HIVEONSPARK, ModuleGroupsUtils.getModuleGroups((ComponentCondition) null, ModuleGroupName.HIVE.get(this.getVersion()), true));
    // Sqoop
    result.put(ComponentType.SQOOP, SqoopModuleGroup.getModuleGroups(this.getVersion()));
    result.put(ComponentType.SPARKBATCH, ModuleGroupsUtils.getModuleGroups(null, ModuleGroupName.SPARK_BATCH.get(this.getVersion()), true));
    result.put(ComponentType.SPARKSTREAMING, ModuleGroupsUtils.getModuleGroups(null, ModuleGroupName.SPARK_STREAMING.get(this.getVersion()), true));
    return result;
}
Also used : HashSet(java.util.HashSet) Set(java.util.Set) ShowExpression(org.talend.hadoop.distribution.condition.ShowExpression) BasicExpression(org.talend.hadoop.distribution.condition.BasicExpression) HashMap(java.util.HashMap) SimpleComponentCondition(org.talend.hadoop.distribution.condition.SimpleComponentCondition) MultiComponentCondition(org.talend.hadoop.distribution.condition.MultiComponentCondition) ComponentCondition(org.talend.hadoop.distribution.condition.ComponentCondition)
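
As a brief, hypothetical consumption sketch (a subclass would typically build the map once and look entries up by component type; nothing beyond the types shown above is assumed):

// Hypothetical call site inside an AbstractDistribution subclass.
Map<ComponentType, Set<DistributionModuleGroup>> groups = buildModuleGroups();
// HIVE aggregates two entries: the plain HIVE group plus the HBASE group guarded by hiveOnHbaseCondition.
Set<DistributionModuleGroup> hiveGroups = groups.get(ComponentType.HIVE);
Set<DistributionModuleGroup> sparkBatchGroups = groups.get(ComponentType.SPARKBATCH);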

Example 13 with MultiComponentCondition

use of org.talend.hadoop.distribution.condition.MultiComponentCondition in project tbd-studio-se by Talend.

the class DynamicHDPSparkStreamingKinesisNodeModuleGroup method getModuleGroups.

@Override
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) throws Exception {
    Set<DistributionModuleGroup> moduleGroups = new HashSet<>();
    Set<DistributionModuleGroup> moduleGroupsFromSuper = super.getModuleGroups(distribution, version);
    if (moduleGroupsFromSuper != null && !moduleGroupsFromSuper.isEmpty()) {
        moduleGroups.addAll(moduleGroupsFromSuper);
    }
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    String spark2KinesisMrRequiredRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK2_KINESIS_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(spark2KinesisMrRequiredRuntimeId);
    if (StringUtils.isNotBlank(spark2KinesisMrRequiredRuntimeId)) {
        DistributionModuleGroup dmgSpark1 = new DistributionModuleGroup(spark2KinesisMrRequiredRuntimeId, true,
                new NestedComponentCondition(new MultiComponentCondition(
                        new SparkStreamingLinkedNodeCondition(distribution, version).getCondition(),
                        BooleanOperator.AND, spark2Condition)));
        moduleGroups.add(dmgSpark1);
    }
    return moduleGroups;
}
Also used : DynamicPluginAdapter(org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter) SparkStreamingLinkedNodeCondition(org.talend.hadoop.distribution.condition.common.SparkStreamingLinkedNodeCondition) MultiComponentCondition(org.talend.hadoop.distribution.condition.MultiComponentCondition) NestedComponentCondition(org.talend.hadoop.distribution.condition.NestedComponentCondition) DistributionModuleGroup(org.talend.hadoop.distribution.DistributionModuleGroup) HashSet(java.util.HashSet)

Example 14 with MultiComponentCondition

use of org.talend.hadoop.distribution.condition.MultiComponentCondition in project tbd-studio-se by Talend.

the class DistributionVersionModule method getModuleRequiredIf.

public ComponentCondition getModuleRequiredIf() {
    ComponentCondition condition;
    // The import is needed only if the right distribution and version are selected, and
    // if the Distribution parameter is shown. The second condition takes the
    // USE_EXISTING_CONNECTION case into account.
    final ComponentType componentType = distributionVersion.distribution.componentType;
    Expression distributionSelected = new BasicExpression(componentType.getDistributionParameter(), EqualityOperator.EQ, distributionVersion.distribution.name);
    Expression distributionVersionSelected = new BasicExpression(componentType.getVersionParameter(), EqualityOperator.EQ, distributionVersion.version);
    Expression distributionShown = new ShowExpression(componentType.getDistributionParameter());
    condition = new MultiComponentCondition(new SimpleComponentCondition(distributionSelected), BooleanOperator.AND,
            new MultiComponentCondition(new SimpleComponentCondition(distributionVersionSelected), BooleanOperator.AND,
                    new SimpleComponentCondition(distributionShown)));
    if (moduleGroup.getRequiredIf() != null) {
        condition = new MultiComponentCondition(condition, BooleanOperator.AND, new NestedComponentCondition(moduleGroup.getRequiredIf()));
    }
    return condition;
}
Also used : ComponentType(org.talend.hadoop.distribution.ComponentType) ShowExpression(org.talend.hadoop.distribution.condition.ShowExpression) BasicExpression(org.talend.hadoop.distribution.condition.BasicExpression) LinkedNodeExpression(org.talend.hadoop.distribution.condition.LinkedNodeExpression) Expression(org.talend.hadoop.distribution.condition.Expression) RawExpression(org.talend.hadoop.distribution.condition.RawExpression) SimpleComponentCondition(org.talend.hadoop.distribution.condition.SimpleComponentCondition) MultiComponentCondition(org.talend.hadoop.distribution.condition.MultiComponentCondition) NestedComponentCondition(org.talend.hadoop.distribution.condition.NestedComponentCondition) ComponentCondition(org.talend.hadoop.distribution.condition.ComponentCondition)
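
Because MultiComponentCondition combines exactly two conditions, the three-way AND above has to be expressed by nesting. A hypothetical helper (not part of the source) that left-folds the AND over any number of conditions could make such chains easier to read:

// Hypothetical helper: AND-combine an arbitrary number of conditions by nesting MultiComponentCondition.
static ComponentCondition andAll(ComponentCondition first, ComponentCondition... rest) {
    ComponentCondition combined = first;
    for (ComponentCondition next : rest) {
        combined = new MultiComponentCondition(combined, BooleanOperator.AND, next);
    }
    return combined;
}

With such a helper, the nested expression above would reduce to andAll(new SimpleComponentCondition(distributionSelected), new SimpleComponentCondition(distributionVersionSelected), new SimpleComponentCondition(distributionShown)).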

Example 15 with MultiComponentCondition

use of org.talend.hadoop.distribution.condition.MultiComponentCondition in project tbd-studio-se by Talend.

the class ComponentConditionUtil method generateSparkVersionShowIfConditions.

/**
 * Generates the "SHOW_IF" condition for the "SUPPORTED_SPARK_VERSION" drop-down list. Given a map of Spark versions
 * and corresponding supported hadoop versions, it builds a {@link ComponentCondition} for each entry in the map.
 *
 * @param supportedSparkVersions the map of Spark versions
 * @return an array of String representations of {@link ComponentCondition}s, one per map entry
 */
public static String[] generateSparkVersionShowIfConditions(Map<ESparkVersion, Set<DistributionVersion>> supportedSparkVersions) {
    String[] results = null;
    if (supportedSparkVersions != null) {
        results = new String[supportedSparkVersions.size()];
        int conditionIndex = 0;
        for (Map.Entry<ESparkVersion, Set<DistributionVersion>> entry : supportedSparkVersions.entrySet()) {
            Set<ComponentCondition> multiComponentConditions = new LinkedHashSet<>();
            for (DistributionVersion distributionVersion : entry.getValue()) {
                SimpleComponentCondition distribution = new SimpleComponentCondition(
                        new BasicExpression("DISTRIBUTION", EqualityOperator.EQ, distributionVersion.distribution.getName())); // $NON-NLS-1$
                SimpleComponentCondition version = new SimpleComponentCondition(
                        new BasicExpression("SPARK_VERSION", EqualityOperator.EQ, distributionVersion.getVersion())); // $NON-NLS-1$
                multiComponentConditions.add(new MultiComponentCondition(distribution, BooleanOperator.AND, version));
            }
            ComponentCondition componentCondition = buildDistributionShowIf(multiComponentConditions);
            results[conditionIndex++] = componentCondition != null ? componentCondition.getConditionString() : null;
        }
    }
    return results;
}
Also used : LinkedHashSet(java.util.LinkedHashSet) Set(java.util.Set) SimpleComponentCondition(org.talend.hadoop.distribution.condition.SimpleComponentCondition) ESparkVersion(org.talend.hadoop.distribution.ESparkVersion) DistributionVersion(org.talend.hadoop.distribution.model.DistributionVersion) BasicExpression(org.talend.hadoop.distribution.condition.BasicExpression) MultiComponentCondition(org.talend.hadoop.distribution.condition.MultiComponentCondition) NestedComponentCondition(org.talend.hadoop.distribution.condition.NestedComponentCondition) ComponentCondition(org.talend.hadoop.distribution.condition.ComponentCondition) Map(java.util.Map)
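
The condition built in the inner loop can be sketched as a standalone helper (a hypothetical refactoring, using only the constructors and accessors visible above):

// Hypothetical extraction of the inner-loop logic: DISTRIBUTION == <name> AND SPARK_VERSION == <version>.
static ComponentCondition distributionAndSparkVersion(DistributionVersion distributionVersion) {
    SimpleComponentCondition distribution = new SimpleComponentCondition(
            new BasicExpression("DISTRIBUTION", EqualityOperator.EQ, distributionVersion.distribution.getName())); // $NON-NLS-1$
    SimpleComponentCondition version = new SimpleComponentCondition(
            new BasicExpression("SPARK_VERSION", EqualityOperator.EQ, distributionVersion.getVersion())); // $NON-NLS-1$
    return new MultiComponentCondition(distribution, BooleanOperator.AND, version);
}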

Aggregations

MultiComponentCondition (org.talend.hadoop.distribution.condition.MultiComponentCondition): 24
SimpleComponentCondition (org.talend.hadoop.distribution.condition.SimpleComponentCondition): 18
BasicExpression (org.talend.hadoop.distribution.condition.BasicExpression): 14
HashSet (java.util.HashSet): 13
DistributionModuleGroup (org.talend.hadoop.distribution.DistributionModuleGroup): 12
ComponentCondition (org.talend.hadoop.distribution.condition.ComponentCondition): 12
NestedComponentCondition (org.talend.hadoop.distribution.condition.NestedComponentCondition): 9
DynamicPluginAdapter (org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter): 7
SparkStreamingLinkedNodeCondition (org.talend.hadoop.distribution.condition.common.SparkStreamingLinkedNodeCondition): 6
ShowExpression (org.talend.hadoop.distribution.condition.ShowExpression): 4
Expression (org.talend.hadoop.distribution.condition.Expression): 3
LinkedNodeExpression (org.talend.hadoop.distribution.condition.LinkedNodeExpression): 3
Set (java.util.Set): 2
RawExpression (org.talend.hadoop.distribution.condition.RawExpression): 2
SparkBatchLinkedNodeCondition (org.talend.hadoop.distribution.condition.common.SparkBatchLinkedNodeCondition): 2
HashMap (java.util.HashMap): 1
LinkedHashSet (java.util.LinkedHashSet): 1
Map (java.util.Map): 1
Test (org.junit.Test): 1
ComponentType (org.talend.hadoop.distribution.ComponentType): 1