Search in sources :

Example 1 with DynamicPluginAdapter

use of org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter in project tbd-studio-se by Talend.

From the class DynamicHDPSparkBatchModuleGroup, the method getModuleGroups:

/**
 * Builds the Spark Batch module groups for a dynamic HDP distribution.
 * <p>
 * Merges the module groups contributed by the superclass with the runtime
 * module groups resolved from the dynamic plugin configuration via
 * {@link DynamicPluginAdapter}. Most groups are registered once per Spark
 * version condition; the registration boilerplate is factored into
 * {@code addGroupsIfPresent}.
 *
 * @return the aggregated set of module groups (never null; may be empty)
 * @throws Exception propagated from the superclass lookup or from
 *         {@code checkRuntimeId} when a runtime id fails validation
 */
@Override
public Set<DistributionModuleGroup> getModuleGroups() throws Exception {
    Set<DistributionModuleGroup> moduleGroups = new HashSet<>();
    Set<DistributionModuleGroup> moduleGroupsFromSuper = super.getModuleGroups();
    if (moduleGroupsFromSuper != null && !moduleGroupsFromSuper.isEmpty()) {
        moduleGroups.addAll(moduleGroupsFromSuper);
    }
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    // Resolve every template id to its runtime module group id up front, then
    // validate them all (same order) before any group is registered.
    String spark2RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK2_MODULE_GROUP.getModuleName());
    String sparkMRRequiredRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK_MRREQUIRED_MODULE_GROUP.getModuleName());
    String hdfsRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HDFS_MODULE_GROUP.getModuleName());
    String hdfsNotSpark16RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HDFS_NOT_SPARK_1_6_MODULE_GROUP.getModuleName());
    String tezNotSpark16RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.TEZ_NOT_SPARK_1_6_MODULE_GROUP.getModuleName());
    String mapReduceRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.MAPREDUCE_MODULE_GROUP.getModuleName());
    String atlasSpark1RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.ATLAS_SPARK_1_MODULE_GROUP.getModuleName());
    String atlasSpark2RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.ATLAS_SPARK_2_MODULE_GROUP.getModuleName());
    String sqoopRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SQOOP_MODULE_GROUP.getModuleName());
    String sqoopParquetRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SQOOP_PARQUET_MODULE_GROUP.getModuleName());
    String hBaseRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HBASE_MODULE_GROUP.getModuleName());
    String sparkS3RuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK_S3_MRREQUIRED_MODULE_GROUP.getModuleName());
    checkRuntimeId(spark2RuntimeId);
    checkRuntimeId(sparkMRRequiredRuntimeId);
    checkRuntimeId(hdfsRuntimeId);
    checkRuntimeId(hdfsNotSpark16RuntimeId);
    checkRuntimeId(tezNotSpark16RuntimeId);
    checkRuntimeId(mapReduceRuntimeId);
    checkRuntimeId(atlasSpark1RuntimeId);
    checkRuntimeId(atlasSpark2RuntimeId);
    checkRuntimeId(sqoopRuntimeId);
    checkRuntimeId(sqoopParquetRuntimeId);
    checkRuntimeId(hBaseRuntimeId);
    checkRuntimeId(sparkS3RuntimeId);
    // NOTE(review): spark2RuntimeId is resolved and validated above but never
    // registered in any module group below — confirm whether a registration is
    // missing here, or whether only the checkRuntimeId validation is intended.
    // Atlas groups apply only when the "use Atlas" option is set, combined
    // with the matching Spark major-version condition.
    ComponentCondition useAtlas = new SimpleComponentCondition(new BasicExpression(MRConstant.USE_ATLAS));
    ComponentCondition atlasSpark1x = new MultiComponentCondition(useAtlas, BooleanOperator.AND, conditionSpark1);
    ComponentCondition atlasSpark2x = new MultiComponentCondition(useAtlas, BooleanOperator.AND, conditionSpark2);
    addGroupsIfPresent(moduleGroups, sparkMRRequiredRuntimeId, true, conditionSpark1, conditionSpark2);
    addGroupsIfPresent(moduleGroups, hdfsRuntimeId, false, conditionSpark1, conditionSpark2);
    addGroupsIfPresent(moduleGroups, hdfsNotSpark16RuntimeId, false, conditionNotSpark16);
    addGroupsIfPresent(moduleGroups, tezNotSpark16RuntimeId, false, conditionNotSpark16);
    addGroupsIfPresent(moduleGroups, mapReduceRuntimeId, false, conditionSpark1, conditionSpark2);
    addGroupsIfPresent(moduleGroups, atlasSpark1RuntimeId, true, atlasSpark1x);
    addGroupsIfPresent(moduleGroups, atlasSpark2RuntimeId, true, atlasSpark2x);
    addGroupsIfPresent(moduleGroups, sqoopRuntimeId, false, conditionSpark1, conditionSpark2);
    addGroupsIfPresent(moduleGroups, sqoopParquetRuntimeId, false, conditionSpark1, conditionSpark2);
    addGroupsIfPresent(moduleGroups, hBaseRuntimeId, true, conditionSpark1, conditionSpark2);
    if (StringUtils.isNotBlank(sparkS3RuntimeId)) {
        // Unconditional group: the original code attaches no version condition
        // to the S3 module group (two-argument constructor).
        moduleGroups.add(new DistributionModuleGroup(sparkS3RuntimeId, true));
    }
    return moduleGroups;
}

/**
 * Adds one {@link DistributionModuleGroup} per condition for {@code runtimeId},
 * skipping blank ids. A blank id is simply not registered, matching the
 * original per-id guard behavior.
 *
 * @param moduleGroups the set to add the groups to
 * @param runtimeId    the resolved runtime module group id (may be blank)
 * @param mrRequired   whether the group is MR-required
 * @param conditions   one condition per group instance to register
 */
private void addGroupsIfPresent(Set<DistributionModuleGroup> moduleGroups, String runtimeId,
        boolean mrRequired, ComponentCondition... conditions) {
    if (StringUtils.isNotBlank(runtimeId)) {
        for (ComponentCondition condition : conditions) {
            moduleGroups.add(new DistributionModuleGroup(runtimeId, mrRequired, condition));
        }
    }
}
Also used : DynamicPluginAdapter(org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter) BasicExpression(org.talend.hadoop.distribution.condition.BasicExpression) SimpleComponentCondition(org.talend.hadoop.distribution.condition.SimpleComponentCondition) MultiComponentCondition(org.talend.hadoop.distribution.condition.MultiComponentCondition) ComponentCondition(org.talend.hadoop.distribution.condition.ComponentCondition) DistributionModuleGroup(org.talend.hadoop.distribution.DistributionModuleGroup) HashSet(java.util.HashSet)

Example 2 with DynamicPluginAdapter

use of org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter in project tbd-studio-se by Talend.

From the class DynamicHDPWebHDFSModuleGroup, the method getModuleGroups:

/**
 * Returns the WebHDFS module groups for the given distribution/version,
 * merged with the groups contributed by the superclass. The WebHDFS group is
 * only registered when its runtime id resolves, and it is guarded by the
 * WebHDFS linked-node condition for this distribution and version.
 */
@Override
public Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) throws Exception {
    Set<DistributionModuleGroup> result = new HashSet<>();
    Set<DistributionModuleGroup> inherited = super.getModuleGroups(distribution, version);
    if (inherited != null && !inherited.isEmpty()) {
        result.addAll(inherited);
    }
    DynamicPluginAdapter adapter = getPluginAdapter();
    String webHdfsId = adapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.WEBHDFS_MODULE_GROUP.getModuleName());
    checkRuntimeId(webHdfsId);
    if (StringUtils.isNotBlank(webHdfsId)) {
        // Required (true) only when a node is actually linked through WebHDFS.
        HDFSLinkedNodeCondition linkedNodeCondition = new HDFSLinkedNodeCondition(distribution, version);
        result.add(new DistributionModuleGroup(webHdfsId, true, linkedNodeCondition.getWebHDFSCondition()));
    }
    return result;
}
Also used : HDFSLinkedNodeCondition(org.talend.hadoop.distribution.condition.common.HDFSLinkedNodeCondition) DynamicPluginAdapter(org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter) DistributionModuleGroup(org.talend.hadoop.distribution.DistributionModuleGroup) HashSet(java.util.HashSet)

Example 3 with DynamicPluginAdapter

use of org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter in project tbd-studio-se by Talend.

From the class DynamicSparkBatchModuleGroup, the method getModuleGroups:

/**
 * Resolves the Spark 1.x and Spark 2.x runtime module groups from the dynamic
 * plugin configuration and registers each one (when its id resolves) under
 * the matching Spark major-version condition.
 */
@Override
public Set<DistributionModuleGroup> getModuleGroups() throws Exception {
    DynamicPluginAdapter adapter = getPluginAdapter();
    String spark1Id = adapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK_MODULE_GROUP.getModuleName());
    String spark2Id = adapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK2_MODULE_GROUP.getModuleName());
    checkRuntimeId(spark1Id);
    checkRuntimeId(spark2Id);
    Set<DistributionModuleGroup> groups = new HashSet<>();
    if (StringUtils.isNotBlank(spark1Id)) {
        groups.add(new DistributionModuleGroup(spark1Id, false, conditionSpark1));
    }
    if (StringUtils.isNotBlank(spark2Id)) {
        groups.add(new DistributionModuleGroup(spark2Id, false, conditionSpark2));
    }
    return groups;
}
Also used : DynamicPluginAdapter(org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter) DistributionModuleGroup(org.talend.hadoop.distribution.DistributionModuleGroup) HashSet(java.util.HashSet)

Example 4 with DynamicPluginAdapter

use of org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter in project tbd-studio-se by Talend.

From the class DynamicCDHSparkBatchModuleGroup, the method getModuleGroups:

/**
 * Collects the Spark Batch module groups for a dynamic CDH distribution,
 * combining the superclass contribution with the groups resolved from the
 * dynamic plugin configuration. HDFS groups are split by Spark version
 * (1.6 / 2.1 / common); the Cloudera Navigator group is gated on the
 * "use Cloudera Navigator" component option.
 */
@Override
public Set<DistributionModuleGroup> getModuleGroups() throws Exception {
    Set<DistributionModuleGroup> result = new HashSet<>();
    Set<DistributionModuleGroup> inherited = super.getModuleGroups();
    if (inherited != null && !inherited.isEmpty()) {
        result.addAll(inherited);
    }
    DynamicPluginAdapter adapter = getPluginAdapter();
    // Resolve every template id to its runtime module group id.
    String sparkMrRequiredId = adapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK_MRREQUIRED_MODULE_GROUP.getModuleName());
    String hdfsSpark16Id = adapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HDFS_MODULE_GROUP_SPARK1_6.getModuleName());
    String hdfsSpark21Id = adapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HDFS_MODULE_GROUP_SPARK2_1.getModuleName());
    String hdfsCommonId = adapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HDFS_MODULE_GROUP_COMMON.getModuleName());
    String mapReduceId = adapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.MAPREDUCE_MODULE_GROUP.getModuleName());
    String navigatorId = adapter.getRuntimeModuleGroupIdByTemplateId(DynamicCDHModuleGroupConstant.TALEND_CLOUDERA_CDH_NAVIGATOR.getModuleName());
    // Validate the ids in the same order they were resolved.
    checkRuntimeId(sparkMrRequiredId);
    checkRuntimeId(hdfsSpark16Id);
    checkRuntimeId(hdfsSpark21Id);
    checkRuntimeId(hdfsCommonId);
    checkRuntimeId(mapReduceId);
    checkRuntimeId(navigatorId);
    if (StringUtils.isNotBlank(sparkMrRequiredId)) {
        result.add(new DistributionModuleGroup(sparkMrRequiredId, true, conditionSpark1));
        result.add(new DistributionModuleGroup(sparkMrRequiredId, true, conditionSpark2));
    }
    if (StringUtils.isNotBlank(hdfsSpark16Id)) {
        result.add(new DistributionModuleGroup(hdfsSpark16Id, false, conditionSpark1));
    }
    if (StringUtils.isNotBlank(hdfsSpark21Id)) {
        result.add(new DistributionModuleGroup(hdfsSpark21Id, false, conditionSpark2));
    }
    if (StringUtils.isNotBlank(hdfsCommonId)) {
        result.add(new DistributionModuleGroup(hdfsCommonId, false, conditionSpark1));
        result.add(new DistributionModuleGroup(hdfsCommonId, false, conditionSpark2));
    }
    if (StringUtils.isNotBlank(mapReduceId)) {
        result.add(new DistributionModuleGroup(mapReduceId, false, conditionSpark1));
        result.add(new DistributionModuleGroup(mapReduceId, false, conditionSpark2));
    }
    if (StringUtils.isNotBlank(navigatorId)) {
        ComponentCondition useNavigator = new SimpleComponentCondition(new BasicExpression(SparkBatchConstant.USE_CLOUDERA_NAVIGATOR));
        result.add(new DistributionModuleGroup(navigatorId, true, useNavigator));
    }
    return result;
}
Also used : DynamicPluginAdapter(org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter) BasicExpression(org.talend.hadoop.distribution.condition.BasicExpression) SimpleComponentCondition(org.talend.hadoop.distribution.condition.SimpleComponentCondition) MultiComponentCondition(org.talend.hadoop.distribution.condition.MultiComponentCondition) ComponentCondition(org.talend.hadoop.distribution.condition.ComponentCondition) DistributionModuleGroup(org.talend.hadoop.distribution.DistributionModuleGroup) HashSet(java.util.HashSet)

Example 5 with DynamicPluginAdapter

use of org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter in project tbd-studio-se by Talend.

From the class DynamicSparkStreamingModuleGroup, the method getModuleGroups:

/**
 * Resolves the Spark 1.x and Spark 2.x streaming runtime module groups from
 * the dynamic plugin configuration and registers each one (when its id
 * resolves) under the matching Spark major-version condition.
 */
@Override
public Set<DistributionModuleGroup> getModuleGroups() throws Exception {
    DynamicPluginAdapter adapter = getPluginAdapter();
    String spark1Id = adapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK_MODULE_GROUP.getModuleName());
    String spark2Id = adapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.SPARK2_MODULE_GROUP.getModuleName());
    checkRuntimeId(spark1Id);
    checkRuntimeId(spark2Id);
    Set<DistributionModuleGroup> moduleGroups = new HashSet<>();
    if (StringUtils.isNotBlank(spark1Id)) {
        moduleGroups.add(new DistributionModuleGroup(spark1Id, false, spark1Condition));
    }
    if (StringUtils.isNotBlank(spark2Id)) {
        moduleGroups.add(new DistributionModuleGroup(spark2Id, false, spark2Condition));
    }
    return moduleGroups;
}
Also used : DynamicPluginAdapter(org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter) DistributionModuleGroup(org.talend.hadoop.distribution.DistributionModuleGroup) HashSet(java.util.HashSet)

Aggregations

DynamicPluginAdapter (org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter)65 HashSet (java.util.HashSet)43 DistributionModuleGroup (org.talend.hadoop.distribution.DistributionModuleGroup)43 IDynamicPluginConfiguration (org.talend.core.runtime.dynamic.IDynamicPluginConfiguration)10 SparkStreamingLinkedNodeCondition (org.talend.hadoop.distribution.condition.common.SparkStreamingLinkedNodeCondition)10 MultiComponentCondition (org.talend.hadoop.distribution.condition.MultiComponentCondition)8 Set (java.util.Set)6 NodeComponentTypeBean (org.talend.hadoop.distribution.NodeComponentTypeBean)6 ComponentCondition (org.talend.hadoop.distribution.condition.ComponentCondition)6 SimpleComponentCondition (org.talend.hadoop.distribution.condition.SimpleComponentCondition)6 BasicExpression (org.talend.hadoop.distribution.condition.BasicExpression)5 SparkBatchLinkedNodeCondition (org.talend.hadoop.distribution.condition.common.SparkBatchLinkedNodeCondition)5 IDynamicConfiguration (org.talend.core.runtime.dynamic.IDynamicConfiguration)4 IDynamicPlugin (org.talend.core.runtime.dynamic.IDynamicPlugin)4 ComponentType (org.talend.hadoop.distribution.ComponentType)4 HiveOnSparkComponent (org.talend.hadoop.distribution.component.HiveOnSparkComponent)4 SparkBatchComponent (org.talend.hadoop.distribution.component.SparkBatchComponent)4 SparkStreamingComponent (org.talend.hadoop.distribution.component.SparkStreamingComponent)4 SqoopComponent (org.talend.hadoop.distribution.component.SqoopComponent)4 NestedComponentCondition (org.talend.hadoop.distribution.condition.NestedComponentCondition)4