Usage example of org.talend.hadoop.distribution.NodeComponentTypeBean in the project tbd-studio-se by Talend: class DynamicCDHSparkBatchModuleGroupTemplate, method buildNodeModuleGroups4SparkBatch4Kudu.
@Override
protected void buildNodeModuleGroups4SparkBatch4Kudu(DynamicPluginAdapter pluginAdapter, Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> nodeModuleGroupsMap, String distribution, String version) throws Exception {
    // Registers the Kudu Spark batch components' module groups in nodeModuleGroupsMap.
    // Reuse one factory instead of instantiating DynamicSparkBatchKuduNodeModuleGroup twice.
    DynamicSparkBatchKuduNodeModuleGroup kuduModuleGroup = new DynamicSparkBatchKuduNodeModuleGroup(pluginAdapter);
    // Input/output nodes need the Kudu jars only when they do not reuse an existing connection.
    Set<DistributionModuleGroup> kuduNodeModuleGroups = kuduModuleGroup.getModuleGroups(distribution, version, "USE_EXISTING_CONNECTION == 'false'"); //$NON-NLS-1$
    // The configuration component is unconditional (no required-if expression).
    Set<DistributionModuleGroup> kuduConfigurationModuleGroups = kuduModuleGroup.getModuleGroups(distribution, version, null);
    // Kudu components available in Spark batch jobs.
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.KUDU_INPUT_COMPONENT), kuduNodeModuleGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.KUDU_OUTPUT_COMPONENT), kuduNodeModuleGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.KUDU_CONFIGURATION_COMPONENT), kuduConfigurationModuleGroups);
}
Usage example of org.talend.hadoop.distribution.NodeComponentTypeBean in the project tbd-studio-se by Talend: class DynamicSparkStreamingModuleGroupTemplate, method getNodeModuleGroups.
@Override
public Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> getNodeModuleGroups() throws Exception {
    // Start from the module groups contributed by the parent template, then
    // layer the Spark Streaming specific groups on top of them.
    Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> moduleGroups = super.getNodeModuleGroups();
    DynamicPluginAdapter adapter = getPluginAdapter();
    IDynamicPluginConfiguration pluginConfiguration = adapter.getPluginConfiguration();
    // The plugin configuration id doubles as the distribution version key.
    buildNodeModuleGroups4SparkStreaming(adapter, moduleGroups, pluginConfiguration.getDistribution(), pluginConfiguration.getId());
    return moduleGroups;
}
Usage example of org.talend.hadoop.distribution.NodeComponentTypeBean in the project tbd-studio-se by Talend: class DynamicCDPSparkBatchModuleGroupTemplate, method buildNodeModuleGroups4SparkBatch4Kudu.
@Override
protected void buildNodeModuleGroups4SparkBatch4Kudu(DynamicPluginAdapter pluginAdapter, Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> nodeModuleGroupsMap, String distribution, String version) throws Exception {
    // Registers the Kudu Spark batch components' module groups in nodeModuleGroupsMap.
    // Reuse one factory instead of instantiating DynamicSparkBatchKuduNodeModuleGroup twice.
    DynamicSparkBatchKuduNodeModuleGroup kuduModuleGroup = new DynamicSparkBatchKuduNodeModuleGroup(pluginAdapter);
    // Input/output nodes need the Kudu jars only when they do not reuse an existing connection.
    Set<DistributionModuleGroup> kuduNodeModuleGroups = kuduModuleGroup.getModuleGroups(distribution, version, "USE_EXISTING_CONNECTION == 'false'"); //$NON-NLS-1$
    // The configuration component is unconditional (no required-if expression).
    Set<DistributionModuleGroup> kuduConfigurationModuleGroups = kuduModuleGroup.getModuleGroups(distribution, version, null);
    // Kudu components available in Spark batch jobs.
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.KUDU_INPUT_COMPONENT), kuduNodeModuleGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.KUDU_OUTPUT_COMPONENT), kuduNodeModuleGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.KUDU_CONFIGURATION_COMPONENT), kuduConfigurationModuleGroups);
}
Usage example of org.talend.hadoop.distribution.NodeComponentTypeBean in the project tbd-studio-se by Talend: class DBR640Distribution, method buildNodeModuleGroups.
protected Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> buildNodeModuleGroups(String distribution, String version) {
    // Extends the parent distribution's component-to-module-group map with the
    // Databricks 6.4.0 specific Azure and Kinesis entries.
    Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> result = super.buildNodeModuleGroups(distribution, version);
    // Azure — computed once and shared by the batch and streaming configuration components
    // (the original code invoked getModuleGroups twice with identical arguments).
    Set<DistributionModuleGroup> azureModuleGroups = DBR640AzureNodeModuleGroup.getModuleGroups(distribution, version);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.AZURE_CONFIGURATION_COMPONENT), azureModuleGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.AZURE_CONFIGURATION_COMPONENT), azureModuleGroups);
    // Kinesis — one shared set for all three streaming components.
    Set<DistributionModuleGroup> kinesisModuleGroups = DBR640KinesisNodeModuleGroup.getModuleGroups(distribution, version);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.KINESIS_OUTPUT_COMPONENT), kinesisModuleGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.KINESIS_INPUT_COMPONENT), kinesisModuleGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.KINESIS_INPUT_AVRO_COMPONENT), kinesisModuleGroups);
    return result;
}
Usage example of org.talend.hadoop.distribution.NodeComponentTypeBean in the project tbd-studio-se by Talend: class EMR5290Distribution, method buildNodeModuleGroups.
protected Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> buildNodeModuleGroups(String distribution, String version) {
    // Extends the parent distribution's component-to-module-group map with the
    // EMR 5.29.0 specific entries. Repeated identical getModuleGroups(...) calls
    // are hoisted into locals, matching the method's existing pattern for the
    // WebHDFS and DynamoDB groups; the registration order is unchanged.
    Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> result = super.buildNodeModuleGroups(distribution, version);
    // WebHDFS
    Set<DistributionModuleGroup> webHDFSNodeModuleGroups = EMR5290WebHDFSModuleGroup.getModuleGroups(distribution, version);
    for (String hdfsComponent : HDFSConstant.HDFS_COMPONENTS) {
        result.put(new NodeComponentTypeBean(ComponentType.HDFS, hdfsComponent), webHDFSNodeModuleGroups);
    }
    // Spark Batch Parquet nodes — one shared set for input and output.
    Set<DistributionModuleGroup> batchParquetModuleGroups = EMR5290SparkBatchParquetNodeModuleGroup.getModuleGroups(distribution, version);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.PARQUET_INPUT_COMPONENT), batchParquetModuleGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.PARQUET_OUTPUT_COMPONENT), batchParquetModuleGroups);
    // Spark Batch tSQLRow nodes
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.SPARK_SQL_ROW_COMPONENT), EMR5290SparkBatchSqlRowHiveNodeModuleGroup.getModuleGroups(distribution, version));
    // Spark Batch S3 nodes
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.S3_CONFIGURATION_COMPONENT), EMR5290SparkBatchS3NodeModuleGroup.getModuleGroups(distribution, version));
    // Spark Batch DQ matching
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.MATCH_PREDICT_COMPONENT), EMR5290GraphFramesNodeModuleGroup.getModuleGroups(distribution, version));
    // DynamoDB nodes: input/output need the jars only without an existing connection;
    // the configuration component is unconditional.
    Set<DistributionModuleGroup> dynamoDBNodeModuleGroups = EMR5290SparkDynamoDBNodeModuleGroup.getModuleGroups(distribution, version, "USE_EXISTING_CONNECTION == 'false'"); //$NON-NLS-1$
    Set<DistributionModuleGroup> dynamoDBConfigurationModuleGroups = EMR5290SparkDynamoDBNodeModuleGroup.getModuleGroups(distribution, version, null);
    // ... in Spark batch
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.DYNAMODB_INPUT_COMPONENT), dynamoDBNodeModuleGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.DYNAMODB_OUTPUT_COMPONENT), dynamoDBNodeModuleGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.DYNAMODB_CONFIGURATION_COMPONENT), dynamoDBConfigurationModuleGroups);
    // ... in Spark streaming
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.DYNAMODB_INPUT_COMPONENT), dynamoDBNodeModuleGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.DYNAMODB_OUTPUT_COMPONENT), dynamoDBNodeModuleGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.DYNAMODB_CONFIGURATION_COMPONENT), dynamoDBConfigurationModuleGroups);
    // Spark Streaming Parquet nodes — one shared set for the three components.
    Set<DistributionModuleGroup> streamingParquetModuleGroups = EMR5290SparkStreamingParquetNodeModuleGroup.getModuleGroups(distribution, version);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.PARQUET_INPUT_COMPONENT), streamingParquetModuleGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.PARQUET_OUTPUT_COMPONENT), streamingParquetModuleGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.PARQUET_STREAM_INPUT_COMPONENT), streamingParquetModuleGroups);
    // Spark Streaming tSQLRow nodes
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.SPARK_SQL_ROW_COMPONENT), EMR5290SparkStreamingSqlRowHiveNodeModuleGroup.getModuleGroups(distribution, version));
    // Spark Streaming S3 nodes
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.S3_CONFIGURATION_COMPONENT), EMR5290SparkStreamingS3NodeModuleGroup.getModuleGroups(distribution, version));
    // Spark Streaming Kinesis nodes — one shared set.
    Set<DistributionModuleGroup> kinesisModuleGroups = EMR5290SparkStreamingKinesisNodeModuleGroup.getModuleGroups(distribution, version);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.KINESIS_INPUT_COMPONENT), kinesisModuleGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.KINESIS_INPUT_AVRO_COMPONENT), kinesisModuleGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.KINESIS_OUTPUT_COMPONENT), kinesisModuleGroups);
    // Spark Streaming Kafka nodes — one shared set.
    Set<DistributionModuleGroup> kafkaModuleGroups = EMR5290SparkStreamingKafkaAssemblyModuleGroup.getModuleGroups(distribution, version);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.KAFKA_INPUT_COMPONENT), kafkaModuleGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.KAFKA_AVRO_INPUT_COMPONENT), kafkaModuleGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.KAFKA_OUTPUT_COMPONENT), kafkaModuleGroups);
    // Spark Streaming Flume nodes — one shared set.
    Set<DistributionModuleGroup> flumeModuleGroups = EMR5290SparkStreamingFlumeNodeModuleGroup.getModuleGroups(distribution, version);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.FLUME_INPUT_COMPONENT), flumeModuleGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.FLUME_OUTPUT_COMPONENT), flumeModuleGroups);
    // Azure — one shared set for the batch and streaming configuration components.
    Set<DistributionModuleGroup> azureModuleGroups = EMR5290SparkBatchAzureNodeModuleGroup.getModuleGroups(distribution, version);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.AZURE_CONFIGURATION_COMPONENT), azureModuleGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.AZURE_CONFIGURATION_COMPONENT), azureModuleGroups);
    return result;
}
Aggregations: end of collected usage examples for NodeComponentTypeBean.