Usage examples of com.sequenceiq.it.IntegrationTestContext in project cloudbreak by Hortonworks.
The class MockClusterCreationWithSaltSuccessTest, method testClusterCreation:
@Test
@Parameters({ "clusterName", "ambariPort", "ambariUser", "ambariPassword", "emailNeeded", "enableSecurity", "kerberosMasterKey", "kerberosAdmin", "kerberosPassword", "runRecipesOnHosts", "checkAmbari", "mockPort" })
public void testClusterCreation(@Optional("it-cluster") String clusterName, @Optional("8080") String ambariPort, @Optional("admin") String ambariUser,
        @Optional("admin123!@#") String ambariPassword, @Optional("false") boolean emailNeeded, @Optional("false") boolean enableSecurity,
        @Optional String kerberosMasterKey, @Optional String kerberosAdmin, @Optional String kerberosPassword, @Optional("") String runRecipesOnHosts,
        @Optional("true") boolean checkAmbari, @Optional("9443") int mockPort) throws Exception {
    // GIVEN
    IntegrationTestContext itContext = getItContext();
    String stackIdStr = itContext.getContextParam(CloudbreakITContextConstants.STACK_ID);
    Integer stackId = Integer.valueOf(stackIdStr);
    Integer blueprintId = Integer.valueOf(itContext.getContextParam(CloudbreakITContextConstants.BLUEPRINT_ID));
    List<HostGroup> hostgroups = itContext.getContextParam(CloudbreakITContextConstants.HOSTGROUP_ID, List.class);
    Set<HostGroupRequest> hostGroupJsons1 = convertHostGroups(hostgroups, runRecipesOnHosts);
    itContext.putContextParam(CloudbreakITContextConstants.AMBARI_USER_ID, ambariUser);
    itContext.putContextParam(CloudbreakITContextConstants.AMBARI_PASSWORD_ID, ambariPassword);
    // WHEN
    ClusterRequest clusterRequest = new ClusterRequest();
    clusterRequest.setName(clusterName);
    clusterRequest.setDescription("Cluster for integration test");
    clusterRequest.setEmailNeeded(emailNeeded);
    clusterRequest.setEnableSecurity(enableSecurity);
    clusterRequest.setPassword(ambariPassword);
    clusterRequest.setUserName(ambariUser);
    clusterRequest.setBlueprintId(Long.valueOf(blueprintId));
    clusterRequest.setHostGroups(hostGroupJsons1);
    if (enableSecurity) {
        KerberosRequest kerberosRequest = new KerberosRequest();
        kerberosRequest.setAdmin(kerberosAdmin);
        kerberosRequest.setPassword(kerberosPassword);
        kerberosRequest.setMasterKey(kerberosMasterKey);
        clusterRequest.setKerberos(kerberosRequest);
    }
    GatewayJson gatewayJson = new GatewayJson();
    gatewayJson.setEnableGateway(Boolean.TRUE);
    gatewayJson.setExposedServices(ImmutableList.of("ALL"));
    clusterRequest.setGateway(gatewayJson);
    ClusterV1Endpoint clusterV1Endpoint = getCloudbreakClient().clusterEndpoint();
    Long clusterId = clusterV1Endpoint.post(Long.valueOf(stackId), clusterRequest).getId();
    // THEN
    Assert.assertNotNull(clusterId);
    CloudbreakUtil.waitAndCheckStackStatus(getCloudbreakClient(), stackIdStr, "AVAILABLE");
    CloudbreakUtil.checkClusterAvailability(getCloudbreakClient().stackV1Endpoint(), ambariPort, stackIdStr, ambariUser, ambariPassword, checkAmbari);
    StackCreationMock stackCreationMock = getItContext().getContextParam(CloudbreakV2Constants.MOCK_SERVER, StackCreationMock.class);
    stackCreationMock.verifyCalls(clusterName);
}
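All of these tests share state through the IntegrationTestContext, which behaves as a typed key-value store passed between suite steps. The following is a minimal conceptual sketch of that contract, matching the getContextParam/putContextParam calls used in the snippets; it is not the actual Cloudbreak implementation:

// Conceptual sketch only: a map-backed parameter store mirroring the access pattern
// of the getContextParam/putContextParam calls shown in these tests.
public class IntegrationTestContextSketch {
    private final java.util.Map<String, Object> params = new java.util.HashMap<>();

    public void putContextParam(String key, Object value) {
        params.put(key, value);
    }

    public String getContextParam(String key) {
        // single-argument lookups are used for string-valued ids (stack id, blueprint id, ...)
        return (String) params.get(key);
    }

    public <T> T getContextParam(String key, Class<T> clazz) {
        // typed lookups are used for richer values such as lists, maps, and mock servers
        return clazz.cast(params.get(key));
    }
}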
The class MockClusterScalingTest, method configMockServer:
@BeforeClass
@Parameters({ "mockPort", "sshPort", "scalingAdjustment", "instanceGroup" })
public void configMockServer(@Optional("9443") int mockPort, @Optional("2020") int sshPort, @Optional("1") int scalingAdjustment, String instanceGroup) {
    IntegrationTestContext itContext = getItContext();
    Map<String, CloudVmMetaDataStatus> instanceMap = itContext.getContextParam(CloudbreakITContextConstants.MOCK_INSTANCE_MAP, Map.class);
    ScalingMock scalingMock = (ScalingMock) applicationContext.getBean(ScalingMock.NAME, mockPort, sshPort, instanceMap);
    scalingMock.addSPIEndpoints();
    scalingMock.addMockEndpoints();
    scalingMock.addAmbariMappings(CLUSTER_NAME);
    if (scalingAdjustment > 0) {
        scalingMock.addInstance(scalingAdjustment);
    }
    itContext.putContextParam(CloudbreakV2Constants.MOCK_SERVER, scalingMock);
}
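The mock stored under CloudbreakV2Constants.MOCK_SERVER is later looked up from the same context by the test methods of the class, just as the first snippet does with StackCreationMock. A hedged sketch of that retrieval follows; the actual verification calls on ScalingMock are left as a comment, since their names depend on the scenario:

@Test
public void testScaling() {
    // Retrieve the ScalingMock published by configMockServer from the shared context.
    ScalingMock scalingMock = getItContext().getContextParam(CloudbreakV2Constants.MOCK_SERVER, ScalingMock.class);
    Assert.assertNotNull(scalingMock);
    // ... trigger the upscale or downscale through the Cloudbreak API, then verify the
    // expected SPI and Ambari calls against the mock (verification method names omitted).
}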
The class MockKerberizedClusterCreationTest, method setContextParameters:
@BeforeMethod
public void setContextParameters() {
    IntegrationTestContext itContext = getItContext();
    Assert.assertNotNull(itContext.getContextParam(CloudbreakITContextConstants.BLUEPRINT_ID), "Blueprint id is mandatory.");
    Assert.assertNotNull(itContext.getContextParam(CloudbreakITContextConstants.STACK_ID), "Stack id is mandatory.");
}
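These assertions only check preconditions; the ids themselves are published by earlier steps of the suite via putContextParam. An illustrative sketch of the producing side is shown below; the method name and the id value are hypothetical placeholders, not Cloudbreak code:

@Test
public void createBlueprint() {
    // Hypothetical earlier suite step: after creating the blueprint through the API,
    // publish its id so that later tests (like setContextParameters above) can rely on it.
    String blueprintId = "1"; // placeholder for the id returned by the blueprint POST
    getItContext().putContextParam(CloudbreakITContextConstants.BLUEPRINT_ID, blueprintId);
}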
The class MockStackCreationWithSaltSuccessTest, method setContextParams:
@BeforeMethod
public void setContextParams() {
    IntegrationTestContext itContext = getItContext();
    Assert.assertNotNull(itContext.getContextParam(CloudbreakITContextConstants.TEMPLATE_ID, List.class), "Template id is mandatory.");
    Assert.assertNotNull(itContext.getContextParam(CloudbreakITContextConstants.CREDENTIAL_ID), "Credential id is mandatory.");
    Assert.assertNotNull(itContext.getContextParam(CloudbreakITContextConstants.NETWORK_ID), "Network id is mandatory.");
    Assert.assertNotNull(itContext.getContextParam(CloudbreakITContextConstants.SECURITY_GROUP_ID), "Security group id is mandatory.");
}
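Note that TEMPLATE_ID is read with List.class while the other ids use the plain string lookup: a stack typically references one template per instance group, so earlier template-creation steps are assumed to accumulate their ids into a shared list. A hedged sketch of that accumulation pattern, with illustrative variable names that are not taken verbatim from Cloudbreak:

// Illustrative only: append each created template id to a list kept in the shared context.
List<String> templateIds = itContext.getContextParam(CloudbreakITContextConstants.TEMPLATE_ID, List.class);
if (templateIds == null) {
    templateIds = new ArrayList<>();
    itContext.putContextParam(CloudbreakITContextConstants.TEMPLATE_ID, templateIds);
}
templateIds.add(createdTemplateId); // hypothetical id returned by a template POST in an earlier test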
The class CountRecipeResultsTest, method testFetchRecipeResult:
@Test
@Parameters({ "searchRecipesOnHosts", "lookingFor", "require" })
public void testFetchRecipeResult(String searchRecipesOnHosts, String lookingFor, Integer require) throws Exception {
    // GIVEN
    Assert.assertEquals(new File(defaultPrivateKeyFile).exists(), true, "Private cert file not found: " + defaultPrivateKeyFile);
    Assert.assertFalse(lookingFor.isEmpty());
    IntegrationTestContext itContext = getItContext();
    String stackId = itContext.getContextParam(CloudbreakITContextConstants.STACK_ID);
    StackV1Endpoint stackV1Endpoint = itContext.getContextParam(CloudbreakITContextConstants.CLOUDBREAK_CLIENT, CloudbreakClient.class).stackV1Endpoint();
    List<InstanceGroupResponse> instanceGroups = stackV1Endpoint.get(Long.valueOf(stackId), new HashSet<>()).getInstanceGroups();
    String[] files = lookingFor.split(",");
    List<String> publicIps = getPublicIps(instanceGroups, Arrays.asList(searchRecipesOnHosts.split(",")));
    Collection<Future<Boolean>> futures = new ArrayList<>(publicIps.size() * files.length);
    ExecutorService executorService = Executors.newFixedThreadPool(publicIps.size());
    AtomicInteger count = new AtomicInteger(0);
    // WHEN
    try {
        for (String file : files) {
            for (String ip : publicIps) {
                futures.add((Future<Boolean>) executorService.submit(() -> {
                    if (findFile(ip, file)) {
                        count.incrementAndGet();
                    }
                }));
            }
        }
        for (Future<Boolean> future : futures) {
            future.get();
        }
    } finally {
        executorService.shutdown();
    }
    // THEN
    Assert.assertEquals(count.get(), require.intValue(), "The number of existing files is different than required.");
}
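This test tracks results in an AtomicInteger and casts the submit(Runnable) return value to Future<Boolean>, so future.get() only signals task completion. An equivalent and slightly more direct formulation submits Callable<Boolean> tasks and counts the returned values; this is an illustrative rewrite reusing the variables and the findFile helper from the method above, not the Cloudbreak code:

// Illustrative alternative: let each task return whether the file was found and count the results.
List<Future<Boolean>> results = new ArrayList<>();
for (String file : files) {
    for (String ip : publicIps) {
        results.add(executorService.submit(() -> findFile(ip, file)));
    }
}
int found = 0;
for (Future<Boolean> result : results) {
    if (Boolean.TRUE.equals(result.get())) {
        found++;
    }
}
Assert.assertEquals(found, require.intValue(), "The number of existing files is different than required.");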