Use of org.talend.hadoop.distribution.component.HadoopComponent in project tbd-studio-se by Talend.
From the class HDInsight40DistributionTest, method testHDInsight40Distribution:
@Test
public void testHDInsight40Distribution() throws Exception {
    HadoopComponent distribution = new HDInsight40Distribution();
    assertNotNull(distribution.getDistributionName());
    assertNotNull(distribution.getVersionName(null));
    assertFalse(distribution.doSupportS3());
    assertEquals(HDInsight40Distribution.DISTRIBUTION_NAME, distribution.getDistribution());
    assertEquals(HDInsight40Distribution.VERSION, distribution.getVersion());
    assertEquals(EHadoopVersion.HADOOP_3, distribution.getHadoopVersion());
    assertFalse(distribution.doSupportKerberos());
    assertFalse(distribution.doSupportUseDatanodeHostname());
    assertFalse(distribution.doSupportGroup());
    assertFalse(distribution.doSupportOldImportMode());
    assertFalse(distribution instanceof HDFSComponent);
    assertEquals(DEFAULT_YARN_APPLICATION_CLASSPATH, ((MRComponent) distribution).getYarnApplicationClasspath());
    assertFalse(distribution instanceof HBaseComponent);
    assertFalse(distribution instanceof SqoopComponent);
    // Spark Batch
    assertTrue(((SparkBatchComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_2_4_X));
    assertTrue(((SparkBatchComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_2_3_X));
    assertFalse(((SparkBatchComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_2_1));
    assertFalse(((SparkBatchComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_2_0));
    assertFalse(((SparkBatchComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_1_6));
    assertFalse(((SparkBatchComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_1_5));
    assertFalse(((SparkBatchComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_1_4));
    assertFalse(((SparkBatchComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_1_3));
    assertFalse(((SparkBatchComponent) distribution).doSupportDynamicMemoryAllocation());
    assertFalse(((SparkBatchComponent) distribution).isExecutedThroughSparkJobServer());
    assertTrue(((SparkBatchComponent) distribution).isExecutedThroughLivy());
    assertFalse(((SparkBatchComponent) distribution).doSupportSparkStandaloneMode());
    assertFalse(((SparkBatchComponent) distribution).doSupportSparkYarnClientMode());
    assertTrue(((SparkBatchComponent) distribution).doSupportSparkYarnClusterMode());
    // Spark Streaming
    assertTrue(((SparkStreamingComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_2_4_X));
    assertFalse(((SparkStreamingComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_2_1));
    assertTrue(((SparkStreamingComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_2_3_X));
    assertFalse(((SparkStreamingComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_2_0));
    assertFalse(((SparkStreamingComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_1_6));
    assertFalse(((SparkStreamingComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_1_5));
    assertFalse(((SparkStreamingComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_1_4));
    assertFalse(((SparkStreamingComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_1_3));
    assertFalse(((SparkStreamingComponent) distribution).doSupportDynamicMemoryAllocation());
    assertFalse(((SparkStreamingComponent) distribution).isExecutedThroughSparkJobServer());
    assertTrue(((SparkStreamingComponent) distribution).isExecutedThroughLivy());
    assertFalse(((SparkStreamingComponent) distribution).doSupportCheckpointing());
    assertFalse(((SparkStreamingComponent) distribution).doSupportSparkStandaloneMode());
    assertFalse(((SparkStreamingComponent) distribution).doSupportSparkYarnClientMode());
    assertTrue(((SparkStreamingComponent) distribution).doSupportSparkYarnClusterMode());
    assertFalse(((SparkStreamingComponent) distribution).doSupportBackpressure());
    // Hive
    assertFalse(((HiveComponent) distribution).doSupportHive1());
    assertTrue(((HiveComponent) distribution).doSupportHive2());
    assertTrue(((HiveComponent) distribution).doSupportTezForHive());
    assertFalse(((HiveComponent) distribution).doSupportHBaseForHive());
    assertFalse(((HiveComponent) distribution).doSupportSSL());
    assertTrue(((HiveComponent) distribution).doSupportORCFormat());
    assertTrue(((HiveComponent) distribution).doSupportAvroFormat());
    assertTrue(((HiveComponent) distribution).doSupportParquetFormat());
    assertFalse(((HiveComponent) distribution).doSupportStoreAsParquet());
    assertFalse(distribution instanceof HCatalogComponent);
    assertFalse(distribution instanceof ImpalaComponent);
    assertTrue(distribution.doSupportHDFSEncryption());
    assertTrue(distribution.doSupportCreateServiceConnection());
    assertTrue((distribution.getNecessaryServiceName() == null ? 0 : distribution.getNecessaryServiceName().size()) == 0);
}
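The Spark Batch and Spark Streaming blocks above assert one contains()/not-contains() call per Spark version. If getSparkVersions() returns a java.util.Set, the same intent fits in a single set-equality assertion. The sketch below is self-contained and uses a hypothetical enum and interface as stand-ins for Talend's ESparkVersion and SparkBatchComponent, since their packages are not part of this snippet.

import static org.junit.Assert.assertEquals;

import java.util.EnumSet;
import java.util.Set;

import org.junit.Test;

public class SparkVersionSetSketchTest {

    // Hypothetical stand-in for Talend's ESparkVersion enum.
    enum SparkVersion { SPARK_1_6, SPARK_2_0, SPARK_2_1, SPARK_2_3_X, SPARK_2_4_X }

    // Hypothetical stand-in for SparkBatchComponent#getSparkVersions().
    interface SparkCapable {
        Set<SparkVersion> getSparkVersions();
    }

    @Test
    public void supportsExactlySpark23And24() {
        SparkCapable distribution = () -> EnumSet.of(SparkVersion.SPARK_2_3_X, SparkVersion.SPARK_2_4_X);
        // One assertion covers both the supported and the unsupported versions.
        assertEquals(EnumSet.of(SparkVersion.SPARK_2_3_X, SparkVersion.SPARK_2_4_X), distribution.getSparkVersions());
    }
}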
Use of org.talend.hadoop.distribution.component.HadoopComponent in project tbd-studio-se by Talend.
From the class SynapseDistributionTest, method testSynapseDistribution:
@Test
public void testSynapseDistribution() throws Exception {
    HadoopComponent distribution = new SynapseDistribution();
    assertNotNull(distribution.getDistributionName());
    assertNotNull(distribution.getVersionName(null));
    assertFalse(distribution.doSupportS3());
    assertEquals(SynapseDistribution.DISTRIBUTION_NAME, distribution.getDistribution());
    assertEquals(SynapseDistribution.VERSION, distribution.getVersion());
    assertEquals(EHadoopVersion.HADOOP_3, distribution.getHadoopVersion());
    assertFalse(distribution.doSupportKerberos());
    assertTrue(distribution.doSupportUseDatanodeHostname());
    assertFalse(distribution.doSupportGroup());
    assertFalse(distribution.doSupportOldImportMode());
    assertFalse(((MRComponent) distribution).isExecutedThroughWebHCat());
    assertFalse(((MRComponent) distribution).doSupportCrossPlatformSubmission());
    assertFalse(((MRComponent) distribution).doSupportImpersonation());
    assertEquals(((MRComponent) distribution).getYarnApplicationClasspath(), YARN_APPLICATION_CLASSPATH);
    assertFalse(distribution instanceof HBaseComponent);
    assertFalse(distribution instanceof SqoopComponent);
    assertFalse(distribution instanceof HCatalogComponent);
    assertFalse(distribution instanceof ImpalaComponent);
    assertTrue(distribution.doSupportAzureDataLakeStorageGen2());
    assertTrue(distribution.doSupportWebHDFS());
}
Use of org.talend.hadoop.distribution.component.HadoopComponent in project tbd-studio-se by Talend.
From the class DistributionHelperTest, method test_doSupportService_HadoopComponent:
@Test
public void test_doSupportService_HadoopComponent() {
    HadoopComponent hadoopComponent = Mockito.mock(HadoopComponent.class);
    DistributionBean distribution = Mockito.mock(DistributionBean.class);
    Assert.assertTrue(DistributionHelper.doSupportService(new DistributionVersion(hadoopComponent, distribution, "ABC_10", "Abc 1.0"), IHDConstants.SERVICE_HADOOP));
    // because SparkComponent is a subclass of HadoopComponent
    SparkComponent sparkComponent = Mockito.mock(SparkComponent.class);
    Assert.assertTrue(DistributionHelper.doSupportService(new DistributionVersion(sparkComponent, distribution, "ABC_10", "Abc 1.0"), IHDConstants.SERVICE_HADOOP));
}
Use of org.talend.hadoop.distribution.component.HadoopComponent in project tbd-studio-se by Talend.
From the class DistributionHelperTest, method test_doSupportService_SparkComponent:
@Test
public void test_doSupportService_SparkComponent() {
    HadoopComponent hadoopComponent = Mockito.mock(HadoopComponent.class);
    DistributionBean distribution = Mockito.mock(DistributionBean.class);
    Assert.assertFalse(DistributionHelper.doSupportService(new DistributionVersion(hadoopComponent, distribution, "ABC_10", "Abc 1.0"), IHDConstants.SERVICE_SPARK));
    SparkComponent sparkComponent = Mockito.mock(SparkComponent.class);
    Assert.assertTrue(DistributionHelper.doSupportService(new DistributionVersion(sparkComponent, distribution, "ABC_10", "Abc 1.0"), IHDConstants.SERVICE_SPARK));
    // because SparkStreamingComponent is a subclass of SparkComponent
    SparkStreamingComponent sparkStreamingComponent = Mockito.mock(SparkStreamingComponent.class);
    Assert.assertTrue(DistributionHelper.doSupportService(new DistributionVersion(sparkStreamingComponent, distribution, "ABC_10", "Abc 1.0"), IHDConstants.SERVICE_SPARK));
}
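Both tests above pass plain Mockito mocks with no stubbing, so DistributionHelper.doSupportService can only be deciding from the component's type. The real implementation is not shown on this page; the following self-contained sketch only illustrates that kind of subtype-based dispatch, with hypothetical interfaces standing in for Talend's HadoopComponent / SparkComponent / SparkStreamingComponent hierarchy.

public class ServiceSupportSketch {

    // Hypothetical stand-ins for the Talend component hierarchy.
    interface HadoopComponent { }
    interface SparkComponent extends HadoopComponent { }
    interface SparkStreamingComponent extends SparkComponent { }

    // Assumed behaviour only: every component supports "HADOOP", and "SPARK"
    // is supported whenever the component implements SparkComponent.
    static boolean doSupportService(HadoopComponent component, String service) {
        switch (service) {
            case "HADOOP":
                return true;
            case "SPARK":
                return component instanceof SparkComponent;
            default:
                return false;
        }
    }

    public static void main(String[] args) {
        HadoopComponent hadoop = new HadoopComponent() { };
        SparkComponent sparkStreaming = new SparkStreamingComponent() { };
        System.out.println(doSupportService(hadoop, "SPARK"));         // false
        System.out.println(doSupportService(sparkStreaming, "SPARK")); // true, via subtyping
    }
}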
Use of org.talend.hadoop.distribution.component.HadoopComponent in project tbd-studio-se by Talend.
From the class AbstractTest4DefaultConfiguration, method testDefaultConfiguration_Existed:
// @Test
public void testDefaultConfiguration_Existed() throws JSONException {
    HadoopComponent hadoopComponent = getHadoopComponent();
    final String distribution = hadoopComponent.getDistribution();
    String version = hadoopComponent.getVersion();
    if (version == null) {
        version = "";
    }
    String notFoundMessages = MessageFormat.format("The default configuration of the version \"{0}\" for \"{1}\" is not found", version, distribution);
    // get the whole configuration
    String wholeConfig = hadoopComponent.getDefaultConfig(distribution, "");
    assertNotNull(notFoundMessages, wholeConfig);
    JSONObject wholeJson = new JSONObject(wholeConfig);
    // it should contain at least one key
    assertTrue(notFoundMessages, wholeJson.length() > 0);
}
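The assertions above only rely on the JSONObject(String) constructor and length(), an API shared by the common JSONObject implementations; the exact library used by the project is not visible in this snippet. A minimal, self-contained illustration of that parse-and-count step, written against org.json and with a made-up configuration payload:

import org.json.JSONException;
import org.json.JSONObject;

public class DefaultConfigParseSketch {

    public static void main(String[] args) throws JSONException {
        // Made-up payload; in the test it comes from hadoopComponent.getDefaultConfig(distribution, "").
        String wholeConfig = "{\"fs.defaultFS\":\"hdfs://namenode:8020\"}";
        JSONObject wholeJson = new JSONObject(wholeConfig);
        System.out.println(wholeJson.length() > 0); // true: the configuration has at least one key
    }
}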