Search in sources :

Example 11 with ComponentCondition

use of org.talend.hadoop.distribution.condition.ComponentCondition in project tbd-studio-se by Talend.

In class ModuleGroupsUtils, method getModuleGroups.

/**
 * Builds the collection of {@link DistributionModuleGroup} guarded by a
 * {@link SparkBatchLinkedNodeCondition}, AND-combined with an optional additional condition.
 *
 * @param distribution the distribution key
 * @param version the version key
 * @param condition an additional condition to AND in; may be {@code null}
 * @param moduleName the module name
 * @param mrRequired whether the module group is mrRequired
 * @return a set containing a single {@link DistributionModuleGroup}
 */
public static Set<DistributionModuleGroup> getModuleGroups(String distribution, String version, ComponentCondition condition, String moduleName, boolean mrRequired) {
    // Condition tying the module to the Spark batch configuration linked parameter.
    ComponentCondition sparkBatchCondition = new SparkBatchLinkedNodeCondition(distribution, version, SparkBatchConstant.SPARK_BATCH_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition();
    // AND-combine with the caller-supplied condition when one is provided.
    ComponentCondition effectiveCondition = (condition == null) ? sparkBatchCondition
            : new MultiComponentCondition(condition, BooleanOperator.AND, sparkBatchCondition);
    Set<DistributionModuleGroup> groups = new HashSet<>();
    groups.add(new DistributionModuleGroup(moduleName, mrRequired, effectiveCondition));
    return groups;
}
Also used : SparkBatchLinkedNodeCondition(org.talend.hadoop.distribution.condition.common.SparkBatchLinkedNodeCondition) MultiComponentCondition(org.talend.hadoop.distribution.condition.MultiComponentCondition) SimpleComponentCondition(org.talend.hadoop.distribution.condition.SimpleComponentCondition) ComponentCondition(org.talend.hadoop.distribution.condition.ComponentCondition) MultiComponentCondition(org.talend.hadoop.distribution.condition.MultiComponentCondition) DistributionModuleGroup(org.talend.hadoop.distribution.DistributionModuleGroup) HashSet(java.util.HashSet)

Example 12 with ComponentCondition

use of org.talend.hadoop.distribution.condition.ComponentCondition in project tbd-studio-se by Talend.

In class AbstractDistribution, method buildModuleGroups.

/**
 * Assembles the module groups associated with each {@link ComponentType} supported by this
 * distribution.
 *
 * @return a map from component type to its set of {@link DistributionModuleGroup}
 */
protected Map<ComponentType, Set<DistributionModuleGroup>> buildModuleGroups() {
    Map<ComponentType, Set<DistributionModuleGroup>> groups = new HashMap<>();
    String version = this.getVersion();
    // HCatalog
    // NOTE(review): HCATALOG reuses the HDFS module group name — confirm this is intentional.
    groups.put(ComponentType.HCATALOG, ModuleGroupsUtils.getModuleGroups(null, ModuleGroupName.HDFS.get(version), false));
    // HDFS
    groups.put(ComponentType.HDFS, ModuleGroupsUtils.getModuleGroups(null, ModuleGroupName.HDFS.get(version), false));
    // HBase
    groups.put(ComponentType.HBASE, ModuleGroupsUtils.getModuleGroups(null, ModuleGroupName.HBASE.get(version), true));
    // Hive: always include the Hive group; additionally include the HBase group when the
    // Hive configuration's HBase parameter is both set and shown.
    ComponentCondition hiveOnHbaseCondition = new MultiComponentCondition(
            new SimpleComponentCondition(new BasicExpression(HiveConstant.HIVE_CONFIGURATION_COMPONENT_HBASEPARAMETER)),
            BooleanOperator.AND,
            new SimpleComponentCondition(new ShowExpression(HiveConstant.HIVE_CONFIGURATION_COMPONENT_HBASEPARAMETER)));
    Set<DistributionModuleGroup> hiveGroups = new HashSet<>();
    hiveGroups.addAll(ModuleGroupsUtils.getModuleGroups(null, ModuleGroupName.HIVE.get(version), false));
    hiveGroups.addAll(ModuleGroupsUtils.getModuleGroups(hiveOnHbaseCondition, ModuleGroupName.HBASE.get(version), false));
    groups.put(ComponentType.HIVE, hiveGroups);
    // Hive on Spark
    groups.put(ComponentType.HIVEONSPARK, ModuleGroupsUtils.getModuleGroups((ComponentCondition) null, ModuleGroupName.HIVE.get(version), true));
    // Sqoop
    groups.put(ComponentType.SQOOP, SqoopModuleGroup.getModuleGroups(version));
    // Spark batch / streaming
    groups.put(ComponentType.SPARKBATCH, ModuleGroupsUtils.getModuleGroups(null, ModuleGroupName.SPARK_BATCH.get(version), true));
    groups.put(ComponentType.SPARKSTREAMING, ModuleGroupsUtils.getModuleGroups(null, ModuleGroupName.SPARK_STREAMING.get(version), true));
    return groups;
}
Also used : HashSet(java.util.HashSet) Set(java.util.Set) ShowExpression(org.talend.hadoop.distribution.condition.ShowExpression) BasicExpression(org.talend.hadoop.distribution.condition.BasicExpression) HashMap(java.util.HashMap) SimpleComponentCondition(org.talend.hadoop.distribution.condition.SimpleComponentCondition) MultiComponentCondition(org.talend.hadoop.distribution.condition.MultiComponentCondition) SimpleComponentCondition(org.talend.hadoop.distribution.condition.SimpleComponentCondition) ComponentCondition(org.talend.hadoop.distribution.condition.ComponentCondition) MultiComponentCondition(org.talend.hadoop.distribution.condition.MultiComponentCondition) HashSet(java.util.HashSet)

Example 13 with ComponentCondition

use of org.talend.hadoop.distribution.condition.ComponentCondition in project tbd-studio-se by Talend.

In class DistributionVersionModule, method getModuleRequiredIf.

/**
 * Computes the condition under which this module's import is required: the matching distribution
 * and version must be selected AND the distribution parameter must be shown (the show check takes
 * USE_EXISTING_CONNECTION into account). Any extra "required if" condition declared on the module
 * group is AND-ed in as a nested condition.
 *
 * @return the combined {@link ComponentCondition}
 */
public ComponentCondition getModuleRequiredIf() {
    final ComponentType componentType = distributionVersion.distribution.componentType;
    Expression distributionSelected = new BasicExpression(componentType.getDistributionParameter(), EqualityOperator.EQ, distributionVersion.distribution.name);
    Expression versionSelected = new BasicExpression(componentType.getVersionParameter(), EqualityOperator.EQ, distributionVersion.version);
    Expression distributionShown = new ShowExpression(componentType.getDistributionParameter());
    ComponentCondition condition = new MultiComponentCondition(
            new SimpleComponentCondition(distributionSelected),
            BooleanOperator.AND,
            new MultiComponentCondition(
                    new SimpleComponentCondition(versionSelected),
                    BooleanOperator.AND,
                    new SimpleComponentCondition(distributionShown)));
    // Fold in the module group's own "required if" clause when present.
    if (moduleGroup.getRequiredIf() != null) {
        condition = new MultiComponentCondition(condition, BooleanOperator.AND, new NestedComponentCondition(moduleGroup.getRequiredIf()));
    }
    return condition;
}
Also used : ComponentType(org.talend.hadoop.distribution.ComponentType) ShowExpression(org.talend.hadoop.distribution.condition.ShowExpression) BasicExpression(org.talend.hadoop.distribution.condition.BasicExpression) ShowExpression(org.talend.hadoop.distribution.condition.ShowExpression) LinkedNodeExpression(org.talend.hadoop.distribution.condition.LinkedNodeExpression) Expression(org.talend.hadoop.distribution.condition.Expression) BasicExpression(org.talend.hadoop.distribution.condition.BasicExpression) RawExpression(org.talend.hadoop.distribution.condition.RawExpression) SimpleComponentCondition(org.talend.hadoop.distribution.condition.SimpleComponentCondition) MultiComponentCondition(org.talend.hadoop.distribution.condition.MultiComponentCondition) SimpleComponentCondition(org.talend.hadoop.distribution.condition.SimpleComponentCondition) NestedComponentCondition(org.talend.hadoop.distribution.condition.NestedComponentCondition) ComponentCondition(org.talend.hadoop.distribution.condition.ComponentCondition) MultiComponentCondition(org.talend.hadoop.distribution.condition.MultiComponentCondition) NestedComponentCondition(org.talend.hadoop.distribution.condition.NestedComponentCondition)

Example 14 with ComponentCondition

use of org.talend.hadoop.distribution.condition.ComponentCondition in project tbd-studio-se by Talend.

In class ComponentConditionUtil, method generateSparkVersionShowIfConditions.

/**
 * Generates the "SHOW_IF" condition for the "SUPPORTED_SPARK_VERSION" drop-down list. Given a map
 * of Spark versions to the distribution versions that support them, builds one
 * {@link ComponentCondition} per map entry.
 *
 * @param supportedSparkVersions the map of Spark versions; may be {@code null}
 * @return an array of String representations of {@link ComponentCondition}s, or {@code null}
 *         when the input map is {@code null}
 */
public static String[] generateSparkVersionShowIfConditions(Map<ESparkVersion, Set<DistributionVersion>> supportedSparkVersions) {
    if (supportedSparkVersions == null) {
        return null;
    }
    String[] conditions = new String[supportedSparkVersions.size()];
    int index = 0;
    for (Map.Entry<ESparkVersion, Set<DistributionVersion>> entry : supportedSparkVersions.entrySet()) {
        // One (DISTRIBUTION == d AND SPARK_VERSION == v) clause per supporting distribution version.
        Set<ComponentCondition> perVersionConditions = new LinkedHashSet<>();
        for (DistributionVersion distributionVersion : entry.getValue()) {
            SimpleComponentCondition distributionMatches = new SimpleComponentCondition(
                    new BasicExpression("DISTRIBUTION", EqualityOperator.EQ, distributionVersion.distribution.getName())); // $NON-NLS-1$
            SimpleComponentCondition versionMatches = new SimpleComponentCondition(
                    new BasicExpression("SPARK_VERSION", EqualityOperator.EQ, distributionVersion.getVersion())); // $NON-NLS-1$
            perVersionConditions.add(new MultiComponentCondition(distributionMatches, BooleanOperator.AND, versionMatches));
        }
        ComponentCondition showIf = buildDistributionShowIf(perVersionConditions);
        conditions[index++] = showIf == null ? null : showIf.getConditionString();
    }
    return conditions;
}
Also used : LinkedHashSet(java.util.LinkedHashSet) Set(java.util.Set) LinkedHashSet(java.util.LinkedHashSet) SimpleComponentCondition(org.talend.hadoop.distribution.condition.SimpleComponentCondition) ESparkVersion(org.talend.hadoop.distribution.ESparkVersion) DistributionVersion(org.talend.hadoop.distribution.model.DistributionVersion) BasicExpression(org.talend.hadoop.distribution.condition.BasicExpression) MultiComponentCondition(org.talend.hadoop.distribution.condition.MultiComponentCondition) SimpleComponentCondition(org.talend.hadoop.distribution.condition.SimpleComponentCondition) NestedComponentCondition(org.talend.hadoop.distribution.condition.NestedComponentCondition) ComponentCondition(org.talend.hadoop.distribution.condition.ComponentCondition) MultiComponentCondition(org.talend.hadoop.distribution.condition.MultiComponentCondition) Map(java.util.Map)

Example 15 with ComponentCondition

use of org.talend.hadoop.distribution.condition.ComponentCondition in project tbd-studio-se by Talend.

In class DynamicHDPGraphFramesNodeModuleGroup, method getModuleGroups.

/**
 * Resolves the GraphFrames module groups for the given distribution/version, one per supported
 * Spark line (1.6 and 2.x), each guarded by the matching Spark-version condition optionally
 * AND-ed with the caller-supplied raw condition.
 *
 * @param distribution the distribution key
 * @param version the version key
 * @param condition a raw additional condition; may be {@code null}
 * @return the set of resolved {@link DistributionModuleGroup}
 * @throws Exception if a runtime module group id cannot be resolved
 */
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version, String condition) throws Exception {
    Set<DistributionModuleGroup> moduleGroups = new HashSet<>();
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    ComponentCondition spark1Condition = getComponentCondition(ESparkVersion.SPARK_1_6.getSparkVersion());
    ComponentCondition spark2Condition = getComponentCondition(ESparkVersion.SPARK_2_1.getSparkVersion());
    if (condition != null) {
        // AND the raw condition into both Spark-version conditions.
        ComponentCondition extraCondition = new SimpleComponentCondition(new RawExpression(condition));
        spark1Condition = new MultiComponentCondition(spark1Condition, BooleanOperator.AND, extraCondition);
        spark2Condition = new MultiComponentCondition(spark2Condition, BooleanOperator.AND, extraCondition);
    }
    // Map template group ids to their runtime ids via the plugin adapter, then validate them.
    String spark1RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.GRAPHFRAMES_MRREQUIRED_MODULE_GROUP.getModuleName());
    String spark2RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.GRAPHFRAMES_SPARK2_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(spark1RuntimeId);
    checkRuntimeId(spark2RuntimeId);
    if (StringUtils.isNotBlank(spark1RuntimeId)) {
        moduleGroups.addAll(ModuleGroupsUtils.getModuleGroups(distribution, version, spark1Condition, spark1RuntimeId, true));
    }
    if (StringUtils.isNotBlank(spark2RuntimeId)) {
        moduleGroups.addAll(ModuleGroupsUtils.getModuleGroups(distribution, version, spark2Condition, spark2RuntimeId, true));
    }
    return moduleGroups;
}
Also used : DynamicPluginAdapter(org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter) RawExpression(org.talend.hadoop.distribution.condition.RawExpression) SimpleComponentCondition(org.talend.hadoop.distribution.condition.SimpleComponentCondition) MultiComponentCondition(org.talend.hadoop.distribution.condition.MultiComponentCondition) SimpleComponentCondition(org.talend.hadoop.distribution.condition.SimpleComponentCondition) ComponentCondition(org.talend.hadoop.distribution.condition.ComponentCondition) MultiComponentCondition(org.talend.hadoop.distribution.condition.MultiComponentCondition) DistributionModuleGroup(org.talend.hadoop.distribution.DistributionModuleGroup) HashSet(java.util.HashSet)

Aggregations

ComponentCondition (org.talend.hadoop.distribution.condition.ComponentCondition)19 SimpleComponentCondition (org.talend.hadoop.distribution.condition.SimpleComponentCondition)19 BasicExpression (org.talend.hadoop.distribution.condition.BasicExpression)15 HashSet (java.util.HashSet)14 MultiComponentCondition (org.talend.hadoop.distribution.condition.MultiComponentCondition)14 DistributionModuleGroup (org.talend.hadoop.distribution.DistributionModuleGroup)12 DynamicPluginAdapter (org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter)6 NestedComponentCondition (org.talend.hadoop.distribution.condition.NestedComponentCondition)5 ShowExpression (org.talend.hadoop.distribution.condition.ShowExpression)4 Set (java.util.Set)3 Expression (org.talend.hadoop.distribution.condition.Expression)3 RawExpression (org.talend.hadoop.distribution.condition.RawExpression)3 HashMap (java.util.HashMap)2 SparkBatchLinkedNodeCondition (org.talend.hadoop.distribution.condition.common.SparkBatchLinkedNodeCondition)2 SparkStreamingLinkedNodeCondition (org.talend.hadoop.distribution.condition.common.SparkStreamingLinkedNodeCondition)2 LinkedHashSet (java.util.LinkedHashSet)1 Map (java.util.Map)1 Test (org.junit.Test)1 ComponentType (org.talend.hadoop.distribution.ComponentType)1 ESparkVersion (org.talend.hadoop.distribution.ESparkVersion)1