Use of org.talend.hadoop.distribution.hdinsight400.HDInsight40Distribution in project tbd-studio-se by Talend.
The example comes from the class HDInsight40DistributionTest, method testHDInsight40Distribution.
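Only the method body is shown on this page. A minimal class skeleton that would let it compile is sketched below; the package declaration, the import locations of the enums and component interfaces, and the DEFAULT_YARN_APPLICATION_CLASSPATH value are assumptions inferred from the identifiers used in the test, not copied from the repository.

package org.talend.hadoop.distribution.hdinsight400; // assumed package

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import org.junit.Test;
// Assumed locations of the distribution enums and component interfaces:
import org.talend.hadoop.distribution.EHadoopVersion;
import org.talend.hadoop.distribution.ESparkVersion;
import org.talend.hadoop.distribution.component.*;

public class HDInsight40DistributionTest {

    // Placeholder for the expected YARN application classpath that the test
    // compares against; presumably defined in this class or a shared test superclass.
    private static final String DEFAULT_YARN_APPLICATION_CLASSPATH = "<expected classpath>";

    // The testHDInsight40Distribution() method listed below goes here.
}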
@Test
public void testHDInsight40Distribution() throws Exception {
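// Distribution identity, Hadoop version, and general capability flags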
HadoopComponent distribution = new HDInsight40Distribution();
assertNotNull(distribution.getDistributionName());
assertNotNull(distribution.getVersionName(null));
assertFalse(distribution.doSupportS3());
assertEquals(HDInsight40Distribution.DISTRIBUTION_NAME, distribution.getDistribution());
assertEquals(HDInsight40Distribution.VERSION, distribution.getVersion());
assertEquals(EHadoopVersion.HADOOP_3, distribution.getHadoopVersion());
assertFalse(distribution.doSupportKerberos());
assertFalse(distribution.doSupportUseDatanodeHostname());
assertFalse(distribution.doSupportGroup());
assertFalse(distribution.doSupportOldImportMode());
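// Component interfaces: HDFS, HBase, and Sqoop components are not implemented;
// the YARN application classpath is exposed through MRComponent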
assertFalse(distribution instanceof HDFSComponent);
assertEquals(DEFAULT_YARN_APPLICATION_CLASSPATH, ((MRComponent) distribution).getYarnApplicationClasspath());
assertFalse(distribution instanceof HBaseComponent);
assertFalse(distribution instanceof SqoopComponent);
// Spark Batch
assertTrue(((SparkBatchComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_2_4_X));
assertTrue(((SparkBatchComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_2_3_X));
assertFalse(((SparkBatchComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_2_1));
assertFalse(((SparkBatchComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_2_0));
assertFalse(((SparkBatchComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_1_6));
assertFalse(((SparkBatchComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_1_5));
assertFalse(((SparkBatchComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_1_4));
assertFalse(((SparkBatchComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_1_3));
assertFalse(((SparkBatchComponent) distribution).doSupportDynamicMemoryAllocation());
assertFalse(((SparkBatchComponent) distribution).isExecutedThroughSparkJobServer());
assertTrue(((SparkBatchComponent) distribution).isExecutedThroughLivy());
assertFalse(((SparkBatchComponent) distribution).doSupportSparkStandaloneMode());
assertFalse(((SparkBatchComponent) distribution).doSupportSparkYarnClientMode());
assertTrue(((SparkBatchComponent) distribution).doSupportSparkYarnClusterMode());
// Spark Streaming
assertTrue(((SparkStreamingComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_2_4_X));
assertTrue(((SparkStreamingComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_2_3_X));
assertFalse(((SparkStreamingComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_2_1));
assertFalse(((SparkStreamingComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_2_0));
assertFalse(((SparkStreamingComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_1_6));
assertFalse(((SparkStreamingComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_1_5));
assertFalse(((SparkStreamingComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_1_4));
assertFalse(((SparkStreamingComponent) distribution).getSparkVersions().contains(ESparkVersion.SPARK_1_3));
assertFalse(((SparkStreamingComponent) distribution).doSupportDynamicMemoryAllocation());
assertFalse(((SparkStreamingComponent) distribution).isExecutedThroughSparkJobServer());
assertTrue(((SparkStreamingComponent) distribution).isExecutedThroughLivy());
assertFalse(((SparkStreamingComponent) distribution).doSupportCheckpointing());
assertFalse(((SparkStreamingComponent) distribution).doSupportSparkStandaloneMode());
assertFalse(((SparkStreamingComponent) distribution).doSupportSparkYarnClientMode());
assertTrue(((SparkStreamingComponent) distribution).doSupportSparkYarnClusterMode());
assertFalse(((SparkStreamingComponent) distribution).doSupportBackpressure());
// Hive
assertFalse(((HiveComponent) distribution).doSupportHive1());
assertTrue(((HiveComponent) distribution).doSupportHive2());
assertTrue(((HiveComponent) distribution).doSupportTezForHive());
assertFalse(((HiveComponent) distribution).doSupportHBaseForHive());
assertFalse(((HiveComponent) distribution).doSupportSSL());
assertTrue(((HiveComponent) distribution).doSupportORCFormat());
assertTrue(((HiveComponent) distribution).doSupportAvroFormat());
assertTrue(((HiveComponent) distribution).doSupportParquetFormat());
assertFalse(((HiveComponent) distribution).doSupportStoreAsParquet());
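// Other services: HCatalog and Impala are not implemented; HDFS encryption and
// service connections are supported, and no additional services are required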
assertFalse(distribution instanceof HCatalogComponent);
assertFalse(distribution instanceof ImpalaComponent);
assertTrue(distribution.doSupportHDFSEncryption());
assertTrue(distribution.doSupportCreateServiceConnection());
assertTrue(distribution.getNecessaryServiceName() == null || distribution.getNecessaryServiceName().isEmpty());
}