
Example 86 with DistributionModuleGroup

Use of org.talend.hadoop.distribution.DistributionModuleGroup in project tbd-studio-se by Talend.

From the class DynamicSparkBatchS3NodeModuleGroup, method getModuleGroups.

@Override
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) throws Exception {
    Set<DistributionModuleGroup> hs = new HashSet<>();
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    String sparkS3MrRequiredRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK_S3_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(sparkS3MrRequiredRuntimeId);
    if (StringUtils.isNotBlank(sparkS3MrRequiredRuntimeId)) {
        DistributionModuleGroup dmg = new DistributionModuleGroup(sparkS3MrRequiredRuntimeId, true, new SparkBatchLinkedNodeCondition(distribution, version, SparkBatchConstant.SPARK_BATCH_S3_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition());
        hs.add(dmg);
    }
    return hs;
}
Also used: DynamicPluginAdapter (org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter), SparkBatchLinkedNodeCondition (org.talend.hadoop.distribution.condition.common.SparkBatchLinkedNodeCondition), DistributionModuleGroup (org.talend.hadoop.distribution.DistributionModuleGroup), HashSet (java.util.HashSet)
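A minimal consumption sketch, not project code: it assumes moduleGroups holds the set returned by getModuleGroups(distribution, version) and uses only the DistributionModuleGroup accessors that also appear in Example 90 (getModuleName() and getRequiredIf()):

for (DistributionModuleGroup group : moduleGroups) {
    // Runtime module group id resolved from the dynamic plugin template.
    String name = group.getModuleName();
    // Linked-node condition; guarded because a group may carry no condition.
    String condition = group.getRequiredIf() == null ? null : group.getRequiredIf().getConditionString();
    System.out.println(name + " -> " + condition);
}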

Example 87 with DistributionModuleGroup

Use of org.talend.hadoop.distribution.DistributionModuleGroup in project tbd-studio-se by Talend.

From the class DynamicCDHGraphFramesNodeModuleGroup, method getModuleGroups.

@Override
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) throws Exception {
    Set<DistributionModuleGroup> hs = new HashSet<>();
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    String graphFramesMrRequiredRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.GRAPHFRAMES_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(graphFramesMrRequiredRuntimeId);
    if (StringUtils.isNotBlank(graphFramesMrRequiredRuntimeId)) {
        DistributionModuleGroup dmg = new DistributionModuleGroup(graphFramesMrRequiredRuntimeId, true, new SparkBatchLinkedNodeCondition(distribution, version, SparkBatchConstant.SPARK_BATCH_SPARKCONFIGURATION_LINKEDPARAMETER).getCondition());
        hs.add(dmg);
    }
    return hs;
}
Also used: DynamicPluginAdapter (org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter), SparkBatchLinkedNodeCondition (org.talend.hadoop.distribution.condition.common.SparkBatchLinkedNodeCondition), DistributionModuleGroup (org.talend.hadoop.distribution.DistributionModuleGroup), HashSet (java.util.HashSet)
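This follows the same pattern as Example 86; only the template id (GRAPHFRAMES_MRREQUIRED_MODULE_GROUP instead of SPARK_S3_MRREQUIRED_MODULE_GROUP) and the linked parameter (the generic SPARK_BATCH_SPARKCONFIGURATION_LINKEDPARAMETER rather than the S3-specific SPARK_BATCH_S3_SPARKCONFIGURATION_LINKEDPARAMETER) differ.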

Example 88 with DistributionModuleGroup

Use of org.talend.hadoop.distribution.DistributionModuleGroup in project tbd-studio-se by Talend.

From the class ModuleGroupsUtils, method getStreamingModuleGroups.

/**
 * Utility method to create the collection of {@link DistributionModuleGroup} with a condition made of a
 * {@link SparkStreamingLinkedNodeCondition} and an additional raw condition
 *
 * @param distribution the distribution key
 * @param version the version key
 * @param condition a nullable additional condition
 * @param moduleName the module name
 * @param mrRequired if the module group is mrRequired
 * @return a set of {@link DistributionModuleGroup}
 */
public static Set<DistributionModuleGroup> getStreamingModuleGroups(String distribution, String version, ComponentCondition condition, String moduleName, boolean mrRequired) {
    Set<DistributionModuleGroup> hs = new HashSet<>();
    ComponentCondition distribCondition = new SparkStreamingLinkedNodeCondition(distribution, version).getCondition();
    ComponentCondition cc = null;
    if (condition != null) {
        cc = new MultiComponentCondition(condition, BooleanOperator.AND, distribCondition);
    } else {
        cc = distribCondition;
    }
    DistributionModuleGroup dmg = new DistributionModuleGroup(moduleName, mrRequired, cc);
    hs.add(dmg);
    return hs;
}
Also used: SparkStreamingLinkedNodeCondition (org.talend.hadoop.distribution.condition.common.SparkStreamingLinkedNodeCondition), MultiComponentCondition (org.talend.hadoop.distribution.condition.MultiComponentCondition), SimpleComponentCondition (org.talend.hadoop.distribution.condition.SimpleComponentCondition), ComponentCondition (org.talend.hadoop.distribution.condition.ComponentCondition), DistributionModuleGroup (org.talend.hadoop.distribution.DistributionModuleGroup), HashSet (java.util.HashSet)
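A hypothetical call sketch for the utility above; the distribution key, version key, and module group name below are placeholders rather than values taken from the project. Passing null as the condition means only the SparkStreamingLinkedNodeCondition is applied:

Set<DistributionModuleGroup> groups = ModuleGroupsUtils.getStreamingModuleGroups(
        "SPARK",                           // distribution key (placeholder)
        "SPARK_3_0",                       // version key (placeholder)
        null,                              // no additional condition
        "SPARK_STREAMING_MODULE_GROUP",    // module group name (placeholder)
        true);                             // mrRequired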

Example 89 with DistributionModuleGroup

Use of org.talend.hadoop.distribution.DistributionModuleGroup in project tbd-studio-se by Talend.

From the class Spark30xDistribution, method buildNodeModuleGroups.

protected Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> buildNodeModuleGroups(String distribution, String version) {
    Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> result = super.buildNodeModuleGroups(distribution, version);
    Set<DistributionModuleGroup> s3ModuleGroup = Spark30xNodeModuleGroup.getModuleGroup(ModuleGroupName.S3.get(getVersion()), SparkBatchConstant.SPARK_BATCH_S3_SPARKCONFIGURATION_LINKEDPARAMETER, Spark30xDistribution.SPARK_VERSION);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.S3_CONFIGURATION_COMPONENT), s3ModuleGroup);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkBatchConstant.S3_CONFIGURATION_COMPONENT), s3ModuleGroup);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.MATCH_PREDICT_COMPONENT), Spark30xNodeModuleGroup.getModuleGroup(ModuleGroupName.PARQUET.get(getVersion()), SparkBatchConstant.SPARK_BATCH_SPARKCONFIGURATION_LINKEDPARAMETER, Spark30xDistribution.SPARK_VERSION));
    // spark model encoder
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.TMODEL_ENCODER_COMPONENT), Spark30xNodeModuleGroup.getModuleGroup(ModuleGroupName.MODEL_ENCODER.get(getVersion()), SparkBatchConstant.SPARK_BATCH_SPARKCONFIGURATION_LINKEDPARAMETER, Spark30xDistribution.SPARK_VERSION));
    // spark Streaming model encoder
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.TMODEL_ENCODER_COMPONENT), Spark30xNodeModuleGroup.getModuleGroup(ModuleGroupName.MODEL_ENCODER.get(getVersion()), SparkStreamingConstant.SPARK_STREAMING_SPARKCONFIGURATION_LINKEDPARAMETER, Spark30xDistribution.SPARK_VERSION));
    return result;
}
Also used: HashSet (java.util.HashSet), Set (java.util.Set), NodeComponentTypeBean (org.talend.hadoop.distribution.NodeComponentTypeBean), DistributionModuleGroup (org.talend.hadoop.distribution.DistributionModuleGroup)
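Note that the same s3ModuleGroup set is registered for both the SPARKBATCH and SPARKSTREAMING variants of the S3 configuration component, so the S3 module group is only built once and shared between the two map entries.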

Example 90 with DistributionModuleGroup

Use of org.talend.hadoop.distribution.DistributionModuleGroup in project tbd-studio-se by Talend.

From the class DBR550SparkBatchAzureNodeModuleGroupTest, method testModuleGroups.

@Test
public void testModuleGroups() throws Exception {
    Map<String, String> results = new HashMap<>();
    results.put(DBR550Constant.SPARK_AZURE_MRREQUIRED_MODULE_GROUP.getModuleName(),
            "((#LINK@NODE.STORAGE_CONFIGURATION.DISTRIBUTION == 'DATABRICKS') AND (#LINK@NODE.STORAGE_CONFIGURATION.SPARK_VERSION == 'Databricks_5_5'))"); //$NON-NLS-1$
    Set<DistributionModuleGroup> moduleGroups = DBR550SparkBatchAzureNodeModuleGroup.getModuleGroups(DBR550Distribution.DISTRIBUTION_NAME, DBR550Distribution.VERSION);
    assertEquals(results.size(), moduleGroups.size());
    for (DistributionModuleGroup module : moduleGroups) {
        assertTrue("Should contain module " + module.getModuleName(), //$NON-NLS-1$
                results.containsKey(module.getModuleName()));
        if (results.get(module.getModuleName()) == null) {
            assertTrue("The condition of the module " + module.getModuleName() + " is not null.", //$NON-NLS-1$ //$NON-NLS-2$
                    results.get(module.getModuleName()) == null);
        } else {
            assertTrue("The condition of the module " + module.getModuleName() + " is null, but it should be " //$NON-NLS-1$ //$NON-NLS-2$
                    + results.get(module.getModuleName()) + ".", //$NON-NLS-1$
                    results.get(module.getModuleName()) != null);
            assertEquals(results.get(module.getModuleName()), module.getRequiredIf().getConditionString());
        }
    }
}
Also used: HashMap (java.util.HashMap), DistributionModuleGroup (org.talend.hadoop.distribution.DistributionModuleGroup), Test (org.junit.Test)
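The null branch above re-checks the same results.get(...) value it just tested, so it cannot fail. A tighter equivalent is sketched below; it is not project code, and it assumes that a null expected condition means the module group carries no condition, i.e. getRequiredIf() returns null:

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.util.Map;
import java.util.Set;

import org.talend.hadoop.distribution.DistributionModuleGroup;

// Hypothetical helper, not part of tbd-studio-se: performs the same checks as the loop above
// without the redundant re-test of results.get(...) inside each branch.
public final class ModuleGroupAssertions {

    private ModuleGroupAssertions() {
    }

    public static void assertModuleGroupsMatch(Map<String, String> expected, Set<DistributionModuleGroup> moduleGroups) {
        assertEquals(expected.size(), moduleGroups.size());
        for (DistributionModuleGroup module : moduleGroups) {
            String name = module.getModuleName();
            assertTrue("Should contain module " + name, expected.containsKey(name));
            String expectedCondition = expected.get(name);
            if (expectedCondition == null) {
                // Assumption: a null expected condition means no condition is attached to the group.
                assertNull("The condition of the module " + name + " should be null.", module.getRequiredIf());
            } else {
                assertNotNull("The condition of the module " + name + " should not be null.", module.getRequiredIf());
                assertEquals(expectedCondition, module.getRequiredIf().getConditionString());
            }
        }
    }
}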

Aggregations

DistributionModuleGroup (org.talend.hadoop.distribution.DistributionModuleGroup): 148
HashSet (java.util.HashSet): 106
DynamicPluginAdapter (org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter): 43
HashMap (java.util.HashMap): 36
Test (org.junit.Test): 36
SparkStreamingLinkedNodeCondition (org.talend.hadoop.distribution.condition.common.SparkStreamingLinkedNodeCondition): 20
SparkBatchLinkedNodeCondition (org.talend.hadoop.distribution.condition.common.SparkBatchLinkedNodeCondition): 14
MultiComponentCondition (org.talend.hadoop.distribution.condition.MultiComponentCondition): 13
ComponentCondition (org.talend.hadoop.distribution.condition.ComponentCondition): 12
SimpleComponentCondition (org.talend.hadoop.distribution.condition.SimpleComponentCondition): 12
NodeComponentTypeBean (org.talend.hadoop.distribution.NodeComponentTypeBean): 11
BasicExpression (org.talend.hadoop.distribution.condition.BasicExpression): 11
Set (java.util.Set): 6
NestedComponentCondition (org.talend.hadoop.distribution.condition.NestedComponentCondition): 4
HDFSLinkedNodeCondition (org.talend.hadoop.distribution.condition.common.HDFSLinkedNodeCondition): 3
Map (java.util.Map): 2
ComponentType (org.talend.hadoop.distribution.ComponentType): 2
RawExpression (org.talend.hadoop.distribution.condition.RawExpression): 2
ShowExpression (org.talend.hadoop.distribution.condition.ShowExpression): 2
DynamicSparkBatchKuduNodeModuleGroup (org.talend.hadoop.distribution.dynamic.template.modulegroup.node.sparkbatch.DynamicSparkBatchKuduNodeModuleGroup): 2