Use of org.talend.hadoop.distribution.DistributionModuleGroup in project tbd-studio-se by Talend.
The class AbstractDynamicDistributionTemplate, method buildNodeModuleGroupsMap.
protected Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> buildNodeModuleGroupsMap(DynamicPluginAdapter pluginAdapter) throws Exception {
    Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> nodeModuleGroupsMap = new HashMap<>();
    for (Map.Entry<ComponentType, IDynamicModuleGroupTemplate> entry : moduleGroupsTemplateMap.entrySet()) {
        Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> groupMap = entry.getValue().getNodeModuleGroups();
        if (groupMap != null && !groupMap.isEmpty()) {
            for (Map.Entry<NodeComponentTypeBean, Set<DistributionModuleGroup>> groupEntry : groupMap.entrySet()) {
                NodeComponentTypeBean key = groupEntry.getKey();
                Set<DistributionModuleGroup> existingGroupSet = nodeModuleGroupsMap.get(key);
                if (existingGroupSet != null) {
                    String keyStr = "";
                    try {
                        ComponentType componentType = key.getComponentType();
                        if (componentType != null) {
                            keyStr = keyStr + componentType.name();
                        }
                        keyStr = keyStr + ", " + key.getComponentName();
                    } catch (Exception e) {
                        ExceptionHandler.process(e);
                    } finally {
                        CommonExceptionHandler.warn(this.getClass().getSimpleName() + " : multiple define of [" + keyStr + "], will merge it.");
                    }
                    existingGroupSet.addAll(groupEntry.getValue());
                } else {
                    nodeModuleGroupsMap.put(key, groupEntry.getValue());
                }
            }
        }
    }
    return nodeModuleGroupsMap;
}
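The notable behavior here is merge-on-duplicate-key: when two templates register module groups under the same NodeComponentTypeBean, the existing set absorbs the new one (with a warning) instead of being overwritten. A minimal, self-contained sketch of that pattern using only JDK collections follows; the class name ModuleGroupMergeSketch and the string keys are hypothetical, not Talend APIs.

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class ModuleGroupMergeSketch {

    public static void main(String[] args) {
        Map<String, Set<String>> merged = new HashMap<>();
        // Two "templates" contribute module-group ids under the same key.
        mergeAll(merged, Map.of("HIVE", new HashSet<>(Set.of("hive-group"))));
        mergeAll(merged, Map.of("HIVE", new HashSet<>(Set.of("hdfs-group"))));
        // Duplicate key: the sets were merged, not replaced.
        System.out.println(merged); // {HIVE=[hive-group, hdfs-group]} (set order may vary)
    }

    static void mergeAll(Map<String, Set<String>> target, Map<String, Set<String>> contribution) {
        for (Map.Entry<String, Set<String>> e : contribution.entrySet()) {
            // Same merge-on-duplicate semantics as the loop above, expressed via Map.merge.
            target.merge(e.getKey(), e.getValue(), (existing, incoming) -> {
                existing.addAll(incoming);
                return existing;
            });
        }
    }
}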
Use of org.talend.hadoop.distribution.DistributionModuleGroup in project tbd-studio-se by Talend.
The class DynamicHCatalogModuleGroup, method getModuleGroups.
@Override
public Set<DistributionModuleGroup> getModuleGroups() throws Exception {
    Set<DistributionModuleGroup> hs = new HashSet<>();
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    String runtimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HDFS_MODULE_GROUP.getModuleName());
    checkRuntimeId(runtimeId);
    if (StringUtils.isNotBlank(runtimeId)) {
        hs.add(new DistributionModuleGroup(runtimeId));
    }
    return hs;
}
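This shape recurs in all the dynamic module groups: resolve a runtime module-group id from a template id, run it through checkRuntimeId for validation, and only register it when it is non-blank. A dependency-free sketch of that guard, with hypothetical names standing in for the plugin adapter and the validation hook:

import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class RuntimeIdGuardSketch {

    // Stand-in for pluginAdapter.getRuntimeModuleGroupIdByTemplateId(...); get() may return null.
    static final Map<String, String> TEMPLATE_TO_RUNTIME = Map.of("HDFS_MODULE_GROUP", "hdfs-runtime-id");

    public static void main(String[] args) {
        Set<String> groups = new HashSet<>();
        String runtimeId = TEMPLATE_TO_RUNTIME.get("HDFS_MODULE_GROUP");
        checkRuntimeId(runtimeId);
        // Equivalent of StringUtils.isNotBlank: skip null or blank ids instead of failing hard.
        if (runtimeId != null && !runtimeId.trim().isEmpty()) {
            groups.add(runtimeId);
        }
        System.out.println(groups); // [hdfs-runtime-id]
    }

    // Stand-in for the checkRuntimeId(...) validation hook; here it only reports the problem.
    static void checkRuntimeId(String id) {
        if (id == null || id.trim().isEmpty()) {
            System.err.println("runtime module group id is missing");
        }
    }
}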
Use of org.talend.hadoop.distribution.DistributionModuleGroup in project tbd-studio-se by Talend.
The class DynamicHiveModuleGroup, method getModuleGroups.
@Override
public Set<DistributionModuleGroup> getModuleGroups() throws Exception {
    Set<DistributionModuleGroup> hs = new HashSet<>();
    DynamicPluginAdapter pluginAdapter = getPluginAdapter();
    String hiveRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HIVE_MODULE_GROUP.getModuleName());
    String hdfsRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HDFS_MODULE_GROUP.getModuleName());
    String mrRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.MAPREDUCE_MODULE_GROUP.getModuleName());
    String hiveHBaseRuntimeId = pluginAdapter.getRuntimeModuleGroupIdByTemplateId(DynamicModuleGroupConstant.HIVE_HBASE_MODULE_GROUP.getModuleName());
    checkRuntimeId(hiveRuntimeId);
    checkRuntimeId(hdfsRuntimeId);
    checkRuntimeId(mrRuntimeId);
    checkRuntimeId(hiveHBaseRuntimeId);
    if (StringUtils.isNotBlank(hiveRuntimeId)) {
        hs.add(new DistributionModuleGroup(hiveRuntimeId));
    }
    if (StringUtils.isNotBlank(hdfsRuntimeId)) {
        hs.add(new DistributionModuleGroup(hdfsRuntimeId));
    }
    if (StringUtils.isNotBlank(mrRuntimeId)) {
        hs.add(new DistributionModuleGroup(mrRuntimeId));
    }
    if (StringUtils.isNotBlank(hiveHBaseRuntimeId)) {
        // The following condition stands for:
        // (isShow[STORE_BY_HBASE] AND STORE_BY_HBASE == 'true')
        ComponentCondition hbaseLoaderCondition = new MultiComponentCondition(
                new SimpleComponentCondition(new BasicExpression(HiveConstant.HIVE_CONFIGURATION_COMPONENT_HBASEPARAMETER)),
                BooleanOperator.AND,
                new SimpleComponentCondition(new ShowExpression(HiveConstant.HIVE_CONFIGURATION_COMPONENT_HBASEPARAMETER)));
        // The Hive components need to import some HBase libraries when the "Use HBase storage" option is checked.
        hs.add(new DistributionModuleGroup(hiveHBaseRuntimeId, false, hbaseLoaderCondition));
    }
    return hs;
}
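The last branch is the interesting one: the HBase module group is registered with a condition, so its jars are only pulled in when the HBase storage parameter is both shown and set to true. The sketch below models that AND-composition over a parameter map with a tiny functional interface; Condition and its lambdas are hypothetical stand-ins for Talend's ComponentCondition/MultiComponentCondition types, not their real API.

import java.util.Map;

public class ConditionSketch {

    interface Condition {
        boolean evaluate(Map<String, String> params);
    }

    public static void main(String[] args) {
        String param = "STORE_BY_HBASE";
        // isShow[STORE_BY_HBASE], modeled here as the parameter being present at all.
        Condition shown = p -> p.containsKey(param);
        // STORE_BY_HBASE == 'true'
        Condition isTrue = p -> "true".equals(p.get(param));
        // (isShow[STORE_BY_HBASE] AND STORE_BY_HBASE == 'true'), as in the comment above.
        Condition hbaseLoader = p -> shown.evaluate(p) && isTrue.evaluate(p);

        System.out.println(hbaseLoader.evaluate(Map.of("STORE_BY_HBASE", "true")));  // true
        System.out.println(hbaseLoader.evaluate(Map.of("STORE_BY_HBASE", "false"))); // false
        System.out.println(hbaseLoader.evaluate(Map.of())); // false
    }
}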
Use of org.talend.hadoop.distribution.DistributionModuleGroup in project tbd-studio-se by Talend.
The class DynamicSparkBatchModuleGroupTemplate, method buildNodeModuleGroups4SparkStreaming.
protected void buildNodeModuleGroups4SparkStreaming(DynamicPluginAdapter pluginAdapter,
        Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> nodeModuleGroupsMap, String distribution, String version)
        throws Exception {
    Set<DistributionModuleGroup> hiveConfigurationModuleGroups = buildNodeModuleGroup4Spark4Hive(pluginAdapter, distribution, version);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkBatchConstant.HIVE_INPUT_COMPONENT), hiveConfigurationModuleGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkBatchConstant.HIVE_OUTPUT_COMPONENT), hiveConfigurationModuleGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkBatchConstant.HIVE_CONFIGURATION_COMPONENT), hiveConfigurationModuleGroups);
    Set<DistributionModuleGroup> hiveWarehouseConfigurationModuleGroups = buildNodeModuleGroup4Spark4HiveWarehouse(pluginAdapter, distribution, version);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkBatchConstant.HIVE_WAREHOUSE_INPUT_COMPONENT), hiveWarehouseConfigurationModuleGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkBatchConstant.HIVE_WAREHOUSE_OUTPUT_COMPONENT), hiveWarehouseConfigurationModuleGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkBatchConstant.HIVE_WAREHOUSE_CONFIGURATION_COMPONENT), hiveWarehouseConfigurationModuleGroups);
}
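Note that one Set instance (hiveConfigurationModuleGroups) is registered under all three Hive component keys, so the entries alias each other: a later mutation through one key (for example the merge in buildNodeModuleGroupsMap above) is visible through all of them. A small JDK-only demonstration of that behavior, with hypothetical keys:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class SharedSetSketch {

    public static void main(String[] args) {
        Map<String, Set<String>> map = new HashMap<>();
        Set<String> hiveGroups = new HashSet<>(Set.of("hive-group"));
        // The same Set instance is registered under several keys, as in the template above.
        map.put("HIVE_INPUT", hiveGroups);
        map.put("HIVE_OUTPUT", hiveGroups);
        // Adding through one key is visible through the other.
        map.get("HIVE_INPUT").add("extra-group");
        System.out.println(map.get("HIVE_OUTPUT")); // [hive-group, extra-group] (order may vary)
    }
}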
Use of org.talend.hadoop.distribution.DistributionModuleGroup in project tbd-studio-se by Talend.
The class DynamicSparkStreamingModuleGroupTemplate, method buildNodeModuleGroups4SparkStreaming.
protected void buildNodeModuleGroups4SparkStreaming(DynamicPluginAdapter pluginAdapter,
        Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> nodeModuleGroupsMap, String distribution, String version)
        throws Exception {
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.PARQUET_INPUT_COMPONENT),
            new DynamicSparkStreamingParquetNodeModuleGroup(pluginAdapter).getModuleGroups(distribution, version));
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.PARQUET_OUTPUT_COMPONENT),
            new DynamicSparkStreamingParquetNodeModuleGroup(pluginAdapter).getModuleGroups(distribution, version));
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.PARQUET_STREAM_INPUT_COMPONENT),
            new DynamicSparkStreamingParquetNodeModuleGroup(pluginAdapter).getModuleGroups(distribution, version));
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.S3_CONFIGURATION_COMPONENT),
            new DynamicSparkStreamingS3NodeModuleGroup(pluginAdapter).getModuleGroups(distribution, version));
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkBatchConstant.AZURE_CONFIGURATION_COMPONENT),
            new DynamicSparkBatchAzureNodeModuleGroup(pluginAdapter).getModuleGroups(distribution, version));
    // Kinesis
    Set<DistributionModuleGroup> kinesisNodeModuleGroups = buildNodeModuleGroups4SparkStreaming4Kinesis(pluginAdapter, distribution, version);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.KINESIS_INPUT_COMPONENT), kinesisNodeModuleGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.KINESIS_INPUT_AVRO_COMPONENT), kinesisNodeModuleGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.KINESIS_OUTPUT_COMPONENT), kinesisNodeModuleGroups);
    // Flume
    Set<DistributionModuleGroup> flumeNodeModuleGroups = new DynamicSparkStreamingFlumeNodeModuleGroup(pluginAdapter).getModuleGroups(distribution, version);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.FLUME_INPUT_COMPONENT), flumeNodeModuleGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.FLUME_OUTPUT_COMPONENT), flumeNodeModuleGroups);
    // Kafka
    Set<DistributionModuleGroup> kafkaAssemblyModuleGroups = buildNodeModuleGroups4SparkStreaming4KafkaAssembly(pluginAdapter, distribution, version);
    Set<DistributionModuleGroup> kafkaAvroModuleGroups = buildNodeModuleGroups4SparkStreaming4KafkaAvro(pluginAdapter, distribution, version);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.KAFKA_INPUT_COMPONENT), kafkaAssemblyModuleGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.KAFKA_AVRO_INPUT_COMPONENT), kafkaAvroModuleGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.KAFKA_OUTPUT_COMPONENT), kafkaAssemblyModuleGroups);
    // DynamoDB ...
    Set<DistributionModuleGroup> dynamoDBNodeModuleGroups = buildNodeModuleGroup4Spark4DynamoDB(pluginAdapter, distribution, version,
            "USE_EXISTING_CONNECTION == 'false'"); //$NON-NLS-1$
    Set<DistributionModuleGroup> dynamoDBConfigurationModuleGroups = buildNodeModuleGroup4Spark4DynamoDB(pluginAdapter, distribution, version, null);
    // ... in Spark streaming
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.DYNAMODB_INPUT_COMPONENT), dynamoDBNodeModuleGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.DYNAMODB_OUTPUT_COMPONENT), dynamoDBNodeModuleGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.DYNAMODB_CONFIGURATION_COMPONENT), dynamoDBConfigurationModuleGroups);
}
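The DynamoDB block registers the same underlying module group twice: gated on the raw condition string "USE_EXISTING_CONNECTION == 'false'" for the input/output nodes, and unconditionally (null condition) for the configuration node. A compact sketch of that shape, where the ModuleGroup record (Java 16+) is a hypothetical stand-in for DistributionModuleGroup, not the real class:

import java.util.Map;
import java.util.Set;

public class ConditionalGroupSketch {

    // Hypothetical stand-in for the three-argument DistributionModuleGroup(id, flag, condition) seen above.
    record ModuleGroup(String id, String condition) {}

    public static void main(String[] args) {
        // Input/output nodes only need the jars when not reusing an existing connection;
        // the configuration node needs them unconditionally (condition == null).
        Set<ModuleGroup> nodeGroups = Set.of(new ModuleGroup("dynamodb-group", "USE_EXISTING_CONNECTION == 'false'"));
        Set<ModuleGroup> configGroups = Set.of(new ModuleGroup("dynamodb-group", null));
        Map<String, Set<ModuleGroup>> map = Map.of(
                "DYNAMODB_INPUT", nodeGroups,
                "DYNAMODB_OUTPUT", nodeGroups,
                "DYNAMODB_CONFIGURATION", configGroups);
        map.forEach((k, v) -> System.out.println(k + " -> " + v));
    }
}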