Search in sources:

Example 21 with NodeComponentTypeBean

Use of org.talend.hadoop.distribution.NodeComponentTypeBean in the tbd-studio-se project by Talend.

From the class DynamicCDHSparkBatchModuleGroupTemplate, method buildNodeModuleGroups4SparkBatch4Kudu:

@Override
protected void buildNodeModuleGroups4SparkBatch4Kudu(DynamicPluginAdapter pluginAdapter, Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> nodeModuleGroupsMap, String distribution, String version) throws Exception {
    // Resolve the module groups required by the Kudu components of the Spark batch palette.
    // Input/output nodes are registered with a condition: the Kudu jars are pulled in
    // only when the component opens its own connection.
    Set<DistributionModuleGroup> kuduIONodeGroups = new DynamicSparkBatchKuduNodeModuleGroup(pluginAdapter)
            .getModuleGroups(distribution, version, "USE_EXISTING_CONNECTION == 'false'"); // $NON-NLS-1$
    // The configuration component is registered with a null condition
    // (presumably unconditional — confirm against getModuleGroups' contract).
    Set<DistributionModuleGroup> kuduConfigGroups = new DynamicSparkBatchKuduNodeModuleGroup(pluginAdapter)
            .getModuleGroups(distribution, version, null);
    // Register each Kudu component for Spark batch; input and output share the
    // same conditional group set.
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.KUDU_INPUT_COMPONENT),
            kuduIONodeGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.KUDU_OUTPUT_COMPONENT),
            kuduIONodeGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.KUDU_CONFIGURATION_COMPONENT),
            kuduConfigGroups);
}
Also used : DynamicSparkBatchKuduNodeModuleGroup(org.talend.hadoop.distribution.dynamic.template.modulegroup.node.sparkbatch.DynamicSparkBatchKuduNodeModuleGroup) NodeComponentTypeBean(org.talend.hadoop.distribution.NodeComponentTypeBean) DistributionModuleGroup(org.talend.hadoop.distribution.DistributionModuleGroup)

Example 22 with NodeComponentTypeBean

Use of org.talend.hadoop.distribution.NodeComponentTypeBean in the tbd-studio-se project by Talend.

From the class DynamicSparkStreamingModuleGroupTemplate, method getNodeModuleGroups:

@Override
public Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> getNodeModuleGroups() throws Exception {
    // Start from the node module groups provided by the parent template,
    // then extend them with the Spark Streaming specific entries.
    final Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> groups = super.getNodeModuleGroups();
    final DynamicPluginAdapter adapter = getPluginAdapter();
    final IDynamicPluginConfiguration config = adapter.getPluginConfiguration();
    // NOTE(review): the plugin configuration id is passed as the "version"
    // argument — looks intentional, but confirm against the template contract.
    buildNodeModuleGroups4SparkStreaming(adapter, groups, config.getDistribution(), config.getId());
    return groups;
}
Also used : Set(java.util.Set) DynamicPluginAdapter(org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter) NodeComponentTypeBean(org.talend.hadoop.distribution.NodeComponentTypeBean) IDynamicPluginConfiguration(org.talend.core.runtime.dynamic.IDynamicPluginConfiguration)

Example 23 with NodeComponentTypeBean

Use of org.talend.hadoop.distribution.NodeComponentTypeBean in the tbd-studio-se project by Talend.

From the class DynamicCDPSparkBatchModuleGroupTemplate, method buildNodeModuleGroups4SparkBatch4Kudu:

@Override
protected void buildNodeModuleGroups4SparkBatch4Kudu(DynamicPluginAdapter pluginAdapter, Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> nodeModuleGroupsMap, String distribution, String version) throws Exception {
    // Kudu module groups for the Spark batch palette.
    // Conditional groups: only needed when the node creates its own connection.
    final String ownConnectionCondition = "USE_EXISTING_CONNECTION == 'false'"; // $NON-NLS-1$
    Set<DistributionModuleGroup> kuduNodeGroups = new DynamicSparkBatchKuduNodeModuleGroup(pluginAdapter)
            .getModuleGroups(distribution, version, ownConnectionCondition);
    // Configuration component groups are resolved with a null condition
    // (presumably always applied — confirm in getModuleGroups).
    Set<DistributionModuleGroup> kuduConfigurationGroups = new DynamicSparkBatchKuduNodeModuleGroup(pluginAdapter)
            .getModuleGroups(distribution, version, null);
    // Map the Kudu input/output nodes to the conditional groups and the
    // configuration node to the unconditional ones.
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.KUDU_INPUT_COMPONENT),
            kuduNodeGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.KUDU_OUTPUT_COMPONENT),
            kuduNodeGroups);
    nodeModuleGroupsMap.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.KUDU_CONFIGURATION_COMPONENT),
            kuduConfigurationGroups);
}
Also used : DynamicSparkBatchKuduNodeModuleGroup(org.talend.hadoop.distribution.dynamic.template.modulegroup.node.sparkbatch.DynamicSparkBatchKuduNodeModuleGroup) NodeComponentTypeBean(org.talend.hadoop.distribution.NodeComponentTypeBean) DistributionModuleGroup(org.talend.hadoop.distribution.DistributionModuleGroup)

Example 24 with NodeComponentTypeBean

Use of org.talend.hadoop.distribution.NodeComponentTypeBean in the tbd-studio-se project by Talend.

From the class DBR640Distribution, method buildNodeModuleGroups:

protected Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> buildNodeModuleGroups(String distribution, String version) {
    // Extend the parent distribution's node module groups with the
    // DBR 6.4.0 specific Azure and Kinesis entries.
    Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> result = super.buildNodeModuleGroups(distribution, version);
    // Azure configuration component, available in both Spark batch and Spark streaming.
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.AZURE_CONFIGURATION_COMPONENT),
            DBR640AzureNodeModuleGroup.getModuleGroups(distribution, version));
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.AZURE_CONFIGURATION_COMPONENT),
            DBR640AzureNodeModuleGroup.getModuleGroups(distribution, version));
    // Kinesis streaming components all resolve through the same module group;
    // each put gets its own freshly-resolved set, matching the original behavior.
    for (String kinesisComponent : new String[] { SparkStreamingConstant.KINESIS_OUTPUT_COMPONENT,
            SparkStreamingConstant.KINESIS_INPUT_COMPONENT, SparkStreamingConstant.KINESIS_INPUT_AVRO_COMPONENT }) {
        result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, kinesisComponent),
                DBR640KinesisNodeModuleGroup.getModuleGroups(distribution, version));
    }
    return result;
}
Also used : Set(java.util.Set) HashSet(java.util.HashSet) NodeComponentTypeBean(org.talend.hadoop.distribution.NodeComponentTypeBean)

Example 25 with NodeComponentTypeBean

Use of org.talend.hadoop.distribution.NodeComponentTypeBean in the tbd-studio-se project by Talend.

From the class EMR5290Distribution, method buildNodeModuleGroups:

protected Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> buildNodeModuleGroups(String distribution, String version) {
    // Extend the parent distribution's node module groups with the EMR 5.29.0
    // specific entries. Where several components share a module group class,
    // a loop is used; each iteration resolves its own set, so every map entry
    // still holds a distinct instance (matching the original statement-per-put form).
    Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> result = super.buildNodeModuleGroups(distribution, version);
    // WebHDFS: every HDFS component shares the same resolved group set.
    Set<DistributionModuleGroup> webHDFSGroups = EMR5290WebHDFSModuleGroup.getModuleGroups(distribution, version);
    for (String hdfsComponent : HDFSConstant.HDFS_COMPONENTS) {
        result.put(new NodeComponentTypeBean(ComponentType.HDFS, hdfsComponent), webHDFSGroups);
    }
    // Spark batch: Parquet input/output nodes.
    for (String parquetComponent : new String[] { SparkBatchConstant.PARQUET_INPUT_COMPONENT,
            SparkBatchConstant.PARQUET_OUTPUT_COMPONENT }) {
        result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, parquetComponent),
                EMR5290SparkBatchParquetNodeModuleGroup.getModuleGroups(distribution, version));
    }
    // Spark batch: tSQLRow node (Hive SQL dialect groups).
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.SPARK_SQL_ROW_COMPONENT),
            EMR5290SparkBatchSqlRowHiveNodeModuleGroup.getModuleGroups(distribution, version));
    // Spark batch: S3 configuration node.
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.S3_CONFIGURATION_COMPONENT),
            EMR5290SparkBatchS3NodeModuleGroup.getModuleGroups(distribution, version));
    // Spark batch: DQ match-predict node (GraphFrames groups).
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.MATCH_PREDICT_COMPONENT),
            EMR5290GraphFramesNodeModuleGroup.getModuleGroups(distribution, version));
    // DynamoDB: input/output nodes need the jars only when they open their own
    // connection; the configuration node is registered with a null condition.
    Set<DistributionModuleGroup> dynamoDBNodeGroups =
            EMR5290SparkDynamoDBNodeModuleGroup.getModuleGroups(distribution, version, "USE_EXISTING_CONNECTION == 'false'"); // $NON-NLS-1$
    Set<DistributionModuleGroup> dynamoDBConfigurationGroups =
            EMR5290SparkDynamoDBNodeModuleGroup.getModuleGroups(distribution, version, null);
    // ... DynamoDB in Spark batch
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.DYNAMODB_INPUT_COMPONENT), dynamoDBNodeGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.DYNAMODB_OUTPUT_COMPONENT), dynamoDBNodeGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.DYNAMODB_CONFIGURATION_COMPONENT), dynamoDBConfigurationGroups);
    // ... DynamoDB in Spark streaming
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.DYNAMODB_INPUT_COMPONENT), dynamoDBNodeGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.DYNAMODB_OUTPUT_COMPONENT), dynamoDBNodeGroups);
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.DYNAMODB_CONFIGURATION_COMPONENT), dynamoDBConfigurationGroups);
    // Spark streaming: Parquet nodes.
    for (String parquetComponent : new String[] { SparkStreamingConstant.PARQUET_INPUT_COMPONENT,
            SparkStreamingConstant.PARQUET_OUTPUT_COMPONENT, SparkStreamingConstant.PARQUET_STREAM_INPUT_COMPONENT }) {
        result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, parquetComponent),
                EMR5290SparkStreamingParquetNodeModuleGroup.getModuleGroups(distribution, version));
    }
    // Spark streaming: tSQLRow node.
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.SPARK_SQL_ROW_COMPONENT),
            EMR5290SparkStreamingSqlRowHiveNodeModuleGroup.getModuleGroups(distribution, version));
    // Spark streaming: S3 configuration node.
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.S3_CONFIGURATION_COMPONENT),
            EMR5290SparkStreamingS3NodeModuleGroup.getModuleGroups(distribution, version));
    // Spark streaming: Kinesis nodes.
    for (String kinesisComponent : new String[] { SparkStreamingConstant.KINESIS_INPUT_COMPONENT,
            SparkStreamingConstant.KINESIS_INPUT_AVRO_COMPONENT, SparkStreamingConstant.KINESIS_OUTPUT_COMPONENT }) {
        result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, kinesisComponent),
                EMR5290SparkStreamingKinesisNodeModuleGroup.getModuleGroups(distribution, version));
    }
    // Spark streaming: Kafka nodes (assembly module group).
    for (String kafkaComponent : new String[] { SparkStreamingConstant.KAFKA_INPUT_COMPONENT,
            SparkStreamingConstant.KAFKA_AVRO_INPUT_COMPONENT, SparkStreamingConstant.KAFKA_OUTPUT_COMPONENT }) {
        result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, kafkaComponent),
                EMR5290SparkStreamingKafkaAssemblyModuleGroup.getModuleGroups(distribution, version));
    }
    // Spark streaming: Flume nodes.
    for (String flumeComponent : new String[] { SparkStreamingConstant.FLUME_INPUT_COMPONENT,
            SparkStreamingConstant.FLUME_OUTPUT_COMPONENT }) {
        result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, flumeComponent),
                EMR5290SparkStreamingFlumeNodeModuleGroup.getModuleGroups(distribution, version));
    }
    // Azure configuration, for both Spark batch and Spark streaming
    // (the streaming entry reuses the batch module group class, as before).
    result.put(new NodeComponentTypeBean(ComponentType.SPARKBATCH, SparkBatchConstant.AZURE_CONFIGURATION_COMPONENT),
            EMR5290SparkBatchAzureNodeModuleGroup.getModuleGroups(distribution, version));
    result.put(new NodeComponentTypeBean(ComponentType.SPARKSTREAMING, SparkStreamingConstant.AZURE_CONFIGURATION_COMPONENT),
            EMR5290SparkBatchAzureNodeModuleGroup.getModuleGroups(distribution, version));
    return result;
}
Also used : HashSet(java.util.HashSet) Set(java.util.Set) NodeComponentTypeBean(org.talend.hadoop.distribution.NodeComponentTypeBean) DistributionModuleGroup(org.talend.hadoop.distribution.DistributionModuleGroup)

Aggregations

NodeComponentTypeBean (org.talend.hadoop.distribution.NodeComponentTypeBean)25 Set (java.util.Set)19 DistributionModuleGroup (org.talend.hadoop.distribution.DistributionModuleGroup)11 HashMap (java.util.HashMap)7 HashSet (java.util.HashSet)7 IDynamicPluginConfiguration (org.talend.core.runtime.dynamic.IDynamicPluginConfiguration)6 DynamicPluginAdapter (org.talend.hadoop.distribution.dynamic.adapter.DynamicPluginAdapter)6 DynamicSparkBatchKuduNodeModuleGroup (org.talend.hadoop.distribution.dynamic.template.modulegroup.node.sparkbatch.DynamicSparkBatchKuduNodeModuleGroup)2 Map (java.util.Map)1 ComponentType (org.talend.hadoop.distribution.ComponentType)1 DynamicSparkBatchAzureNodeModuleGroup (org.talend.hadoop.distribution.dynamic.template.modulegroup.node.sparkbatch.DynamicSparkBatchAzureNodeModuleGroup)1 DynamicSparkBatchParquetNodeModuleGroup (org.talend.hadoop.distribution.dynamic.template.modulegroup.node.sparkbatch.DynamicSparkBatchParquetNodeModuleGroup)1 DynamicSparkBatchS3NodeModuleGroup (org.talend.hadoop.distribution.dynamic.template.modulegroup.node.sparkbatch.DynamicSparkBatchS3NodeModuleGroup)1 DynamicSparkStreamingFlumeNodeModuleGroup (org.talend.hadoop.distribution.dynamic.template.modulegroup.node.sparkstreaming.DynamicSparkStreamingFlumeNodeModuleGroup)1 DynamicSparkStreamingParquetNodeModuleGroup (org.talend.hadoop.distribution.dynamic.template.modulegroup.node.sparkstreaming.DynamicSparkStreamingParquetNodeModuleGroup)1 DynamicSparkStreamingS3NodeModuleGroup (org.talend.hadoop.distribution.dynamic.template.modulegroup.node.sparkstreaming.DynamicSparkStreamingS3NodeModuleGroup)1