Use of org.talend.hadoop.distribution.DistributionModuleGroup in the project tbd-studio-se by Talend.
Class DBR550SparkStreamingModuleGroupTest, method testModuleGroups.
@Test
public void testModuleGroups() throws Exception {
    // Expected module-group names mapped to their required-if condition strings.
    // A null value means the module group is expected to be unconditional.
    Map<String, String> expectedConditions = new HashMap<>();
    expectedConditions.put(DBR550Constant.SPARK_MRREQUIRED_MODULE_GROUP.getModuleName(), null);
    expectedConditions.put(DBR550Constant.SPARK_STREAMING_MRREQUIRED_MODULE_GROUP.getModuleName(), null);
    expectedConditions.put(DBR550Constant.BIGDATALAUNCHER_MODULE_GROUP.getModuleName(), null);
    expectedConditions.put(DBR550Constant.SPARK_HIVE_MRREQUIRED_MODULE_GROUP.getModuleName(), null);

    Set<DistributionModuleGroup> moduleGroups = DBR550SparkStreamingModuleGroup.getModuleGroups();
    assertEquals(expectedConditions.size(), moduleGroups.size());

    for (DistributionModuleGroup module : moduleGroups) {
        String moduleName = module.getModuleName();
        assertTrue("Should contain module " + moduleName, expectedConditions.containsKey(moduleName)); //$NON-NLS-1$
        String expectedCondition = expectedConditions.get(moduleName);
        if (expectedCondition != null) {
            // Verify the module group carries exactly the expected activation condition.
            assertEquals(expectedCondition, module.getRequiredIf().getConditionString());
        }
        // NOTE(review): the original test only asserted tautologies in the
        // expectedCondition == null case and never inspected module.getRequiredIf();
        // consider asserting that such modules are truly unconditional.
    }
}
Use of org.talend.hadoop.distribution.DistributionModuleGroup in the project tbd-studio-se by Talend.
Class DBR640SparkStreamingModuleGroupTest, method testModuleGroups.
@Test
public void testModuleGroups() throws Exception {
    // Expected module-group names mapped to their required-if condition strings.
    // A null value means the module group is expected to be unconditional.
    Map<String, String> expectedConditions = new HashMap<>();
    expectedConditions.put(DBR640Constant.SPARK_LIB_MRREQUIRED_DBR640.getModuleName(), null);
    expectedConditions.put(DBR640Constant.SPARK_STREAMING_LIB_MRREQUIRED_DBR640.getModuleName(), null);
    expectedConditions.put(DBR640Constant.BIGDATA_LAUNCHER_LIB_DBR640.getModuleName(), null);
    expectedConditions.put(DBR640Constant.HIVEONSPARK_LIB_MRREQUIRED_DBR640.getModuleName(), null);

    Set<DistributionModuleGroup> moduleGroups = DBR640SparkStreamingModuleGroup.getModuleGroups();
    assertEquals(expectedConditions.size(), moduleGroups.size());

    for (DistributionModuleGroup module : moduleGroups) {
        String moduleName = module.getModuleName();
        assertTrue("Should contain module " + moduleName, expectedConditions.containsKey(moduleName)); //$NON-NLS-1$
        String expectedCondition = expectedConditions.get(moduleName);
        if (expectedCondition != null) {
            // Verify the module group carries exactly the expected activation condition.
            assertEquals(expectedCondition, module.getRequiredIf().getConditionString());
        }
        // NOTE(review): the original test only asserted tautologies in the
        // expectedCondition == null case and never inspected module.getRequiredIf();
        // consider asserting that such modules are truly unconditional.
    }
}
Use of org.talend.hadoop.distribution.DistributionModuleGroup in the project tbd-studio-se by Talend.
Class DBR73xHiveOnSparkModuleGroupTest, method testModuleGroups.
@Test
public void testModuleGroups() throws Exception {
    // Expected module-group names mapped to their required-if condition strings.
    // A null value means the module group is expected to be unconditional.
    Map<String, String> expected = new HashMap<>();
    expected.put(DBR73xConstant.HIVEONSPARK_LIB_MRREQUIRED_DBR73X.getModuleName(), null);

    Set<DistributionModuleGroup> moduleGroups = DBR73xHiveOnSparkModuleGroup.getModuleGroups();
    assertEquals(expected.size(), moduleGroups.size());

    // The original code called moduleGroups.iterator() and discarded the result;
    // that dead statement has been removed.
    for (DistributionModuleGroup module : moduleGroups) {
        String moduleName = module.getModuleName();
        assertTrue("Should contain module " + moduleName, expected.containsKey(moduleName)); //$NON-NLS-1$
        String expectedCondition = expected.get(moduleName);
        if (expectedCondition != null) {
            // Verify the module group carries exactly the expected activation condition.
            assertEquals(expectedCondition, module.getRequiredIf().getConditionString());
        }
        // NOTE(review): the original test only asserted tautologies in the
        // expectedCondition == null case and never inspected module.getRequiredIf();
        // consider asserting that such modules are truly unconditional.
    }
}
Use of org.talend.hadoop.distribution.DistributionModuleGroup in the project tbd-studio-se by Talend.
Class HDInsight40SparkBatchModuleGroupTest, method testModuleGroups.
@Test
public void testModuleGroups() throws Exception {
    // Expected module-group names mapped to their required-if condition strings.
    // A null value means the module group is expected to be unconditional.
    Map<String, String> expectedConditions = new HashMap<>();
    expectedConditions.put(HDInsight40Constant.SPARK23_MODULE_GROUP.getModuleName(),
            "(SUPPORTED_SPARK_VERSION=='SPARK_2_3_x')"); //$NON-NLS-1$
    expectedConditions.put(HDInsight40Constant.SPARK24_MODULE_GROUP.getModuleName(),
            "(SUPPORTED_SPARK_VERSION=='SPARK_2_4_x')"); //$NON-NLS-1$
    expectedConditions.put(HDInsight40Constant.BIGDATALAUNCHER_MODULE_GROUP.getModuleName(), null);
    expectedConditions.put(HDInsight40Constant.HDINSIGHT400COMMON_MODULE_GROUP.getModuleName(), null);

    Set<DistributionModuleGroup> moduleGroups = HDInsight40SparkBatchModuleGroup.getModuleGroups();
    assertEquals(expectedConditions.size(), moduleGroups.size());

    // The original code called moduleGroups.iterator() and discarded the result;
    // that dead statement has been removed.
    for (DistributionModuleGroup module : moduleGroups) {
        String moduleName = module.getModuleName();
        assertTrue("Should contain module " + moduleName, expectedConditions.containsKey(moduleName)); //$NON-NLS-1$
        String expectedCondition = expectedConditions.get(moduleName);
        if (expectedCondition != null) {
            // Verify the module group carries exactly the expected activation condition.
            assertEquals(expectedCondition, module.getRequiredIf().getConditionString());
        }
        // NOTE(review): the original test only asserted tautologies in the
        // expectedCondition == null case and never inspected module.getRequiredIf();
        // consider asserting that such modules are truly unconditional.
    }
}
Use of org.talend.hadoop.distribution.DistributionModuleGroup in the project tbd-studio-se by Talend.
Class DBR640KinesisNodeModuleGroup, method getModuleGroups.
/**
 * Returns the Spark Streaming module groups required by the Kinesis components
 * on the DBR 6.4 distribution.
 *
 * @param distribution distribution identifier used to build the linked-node condition
 * @param version distribution version used to build the linked-node condition
 * @return a mutable set holding the single Kinesis streaming module group,
 *         marked as MR-required and guarded by a Spark Streaming linked-node condition
 */
public static Set<DistributionModuleGroup> getModuleGroups(String distribution, String version) {
    Set<DistributionModuleGroup> groups = new HashSet<>();
    // The Kinesis library is only pulled in when the Spark Streaming linked-node
    // condition for this distribution/version holds.
    String kinesisModuleName = DBR640Constant.SPARK_STREAMING_LIB_KINESIS_DBR640.getModuleName();
    SparkStreamingLinkedNodeCondition linkedNodeCondition =
            new SparkStreamingLinkedNodeCondition(distribution, version);
    groups.add(new DistributionModuleGroup(kinesisModuleName, true, linkedNodeCondition.getCondition()));
    return groups;
}
Aggregations