Search in sources :

Example 1 with CustomDistribution

Use of org.talend.hadoop.distribution.custom.CustomDistribution in the project tbd-studio-se by Talend.

The class CustomDistributionTest defines the method testCustomDistribution:

@Test
public void testCustomDistribution() throws Exception {
    HadoopComponent distribution = new CustomDistribution();

    // Hoist each component-facet cast into a named local so every capability
    // check below reads as "facet.capability()" instead of repeating the cast.
    HDFSComponent hdfs = (HDFSComponent) distribution;
    MRComponent mr = (MRComponent) distribution;
    HBaseComponent hbase = (HBaseComponent) distribution;
    SqoopComponent sqoop = (SqoopComponent) distribution;
    HiveComponent hive = (HiveComponent) distribution;
    SparkBatchComponent sparkBatch = (SparkBatchComponent) distribution;
    SparkStreamingComponent sparkStreaming = (SparkStreamingComponent) distribution;

    // Generic distribution metadata: a custom distribution has a name but no
    // pinned version information.
    assertNotNull(distribution.getDistributionName());
    assertNull(distribution.getVersionName(null));
    assertTrue(distribution.doSupportS3());
    assertEquals(CustomDistribution.DISTRIBUTION_NAME, distribution.getDistribution());
    assertNull(distribution.getVersion());
    assertNull(distribution.getHadoopVersion());
    assertTrue(distribution.doSupportKerberos());
    assertTrue(distribution.doSupportUseDatanodeHostname());
    assertFalse(distribution.doSupportGroup());
    assertTrue(distribution.doSupportOldImportMode());

    // HDFS capabilities.
    assertTrue(hdfs.doSupportSequenceFileShortType());

    // MapReduce / YARN capabilities.
    assertFalse(mr.isExecutedThroughWebHCat());
    assertFalse(mr.doSupportCrossPlatformSubmission());
    assertTrue(mr.doSupportImpersonation());
    assertEquals(DEFAULT_YARN_APPLICATION_CLASSPATH, mr.getYarnApplicationClasspath());

    // HBase capabilities.
    assertFalse(hbase.doSupportNewHBaseAPI());

    // Sqoop capabilities.
    assertTrue(sqoop.doJavaAPISupportStorePasswordInFile());
    assertFalse(sqoop.doJavaAPISqoopImportSupportDeleteTargetDir());
    assertTrue(sqoop.doJavaAPISqoopImportAllTablesSupportExcludeTable());

    // Hive capabilities (modes, versions, storage formats).
    assertTrue(hive.doSupportEmbeddedMode());
    assertTrue(hive.doSupportStandaloneMode());
    assertTrue(hive.doSupportHive1());
    assertTrue(hive.doSupportHive2());
    assertTrue(hive.doSupportTezForHive());
    assertTrue(hive.doSupportHBaseForHive());
    assertTrue(hive.doSupportSSL());
    assertTrue(hive.doSupportORCFormat());
    assertTrue(hive.doSupportAvroFormat());
    assertTrue(hive.doSupportParquetFormat());

    // Spark batch: only Spark 1.3 is advertised; 1.4 through 2.0 are not.
    assertFalse(sparkBatch.getSparkVersions().contains(ESparkVersion.SPARK_2_0));
    assertFalse(sparkBatch.getSparkVersions().contains(ESparkVersion.SPARK_1_6));
    assertFalse(sparkBatch.getSparkVersions().contains(ESparkVersion.SPARK_1_5));
    assertFalse(sparkBatch.getSparkVersions().contains(ESparkVersion.SPARK_1_4));
    assertTrue(sparkBatch.getSparkVersions().contains(ESparkVersion.SPARK_1_3));
    assertTrue(sparkBatch.doSupportDynamicMemoryAllocation());
    assertFalse(sparkBatch.isExecutedThroughSparkJobServer());
    assertTrue(sparkBatch.doSupportSparkStandaloneMode());
    assertTrue(sparkBatch.doSupportSparkYarnClientMode());

    // Spark streaming: same version matrix as batch, plus streaming features.
    assertFalse(sparkStreaming.getSparkVersions().contains(ESparkVersion.SPARK_2_0));
    assertFalse(sparkStreaming.getSparkVersions().contains(ESparkVersion.SPARK_1_6));
    assertFalse(sparkStreaming.getSparkVersions().contains(ESparkVersion.SPARK_1_5));
    assertFalse(sparkStreaming.getSparkVersions().contains(ESparkVersion.SPARK_1_4));
    assertTrue(sparkStreaming.getSparkVersions().contains(ESparkVersion.SPARK_1_3));
    assertTrue(sparkStreaming.doSupportDynamicMemoryAllocation());
    assertFalse(sparkStreaming.isExecutedThroughSparkJobServer());
    assertTrue(sparkStreaming.doSupportCheckpointing());
    assertTrue(sparkStreaming.doSupportSparkStandaloneMode());
    assertTrue(sparkStreaming.doSupportSparkYarnClientMode());
    assertTrue(sparkStreaming.doSupportBackpressure());

    // Hive features not supported by the custom distribution.
    assertFalse(hive.doSupportStoreAsParquet());
    assertFalse(hive.doSupportClouderaNavigator());

    // Facet interfaces the custom distribution must implement.
    assertTrue(distribution instanceof HCatalogComponent);
    assertTrue(distribution instanceof ImpalaComponent);

    // Connection / storage feature flags.
    assertTrue(distribution.doSupportCreateServiceConnection());
    // Equivalent to the original "(list == null ? 0 : list.size()) == 0":
    // the necessary-service list is either absent or empty.
    assertTrue(distribution.getNecessaryServiceName() == null
            || distribution.getNecessaryServiceName().size() == 0);
    assertFalse(distribution.doSupportAzureDataLakeStorage());
    assertFalse(distribution.doSupportWebHDFS());
}
Also used : ImpalaComponent(org.talend.hadoop.distribution.component.ImpalaComponent) SparkBatchComponent(org.talend.hadoop.distribution.component.SparkBatchComponent) HadoopComponent(org.talend.hadoop.distribution.component.HadoopComponent) SparkStreamingComponent(org.talend.hadoop.distribution.component.SparkStreamingComponent) HCatalogComponent(org.talend.hadoop.distribution.component.HCatalogComponent) CustomDistribution(org.talend.hadoop.distribution.custom.CustomDistribution) Test(org.junit.Test) AbstractDistributionTest(org.talend.hadoop.distribution.test.AbstractDistributionTest)

Aggregations

Test (org.junit.Test)1 HCatalogComponent (org.talend.hadoop.distribution.component.HCatalogComponent)1 HadoopComponent (org.talend.hadoop.distribution.component.HadoopComponent)1 ImpalaComponent (org.talend.hadoop.distribution.component.ImpalaComponent)1 SparkBatchComponent (org.talend.hadoop.distribution.component.SparkBatchComponent)1 SparkStreamingComponent (org.talend.hadoop.distribution.component.SparkStreamingComponent)1 CustomDistribution (org.talend.hadoop.distribution.custom.CustomDistribution)1 AbstractDistributionTest (org.talend.hadoop.distribution.test.AbstractDistributionTest)1