Use of com.sequenceiq.it.IntegrationTestContext in project cloudbreak by hortonworks.
Example: class FilesystemTest, method testFileSystem.
@Test
@Parameters({ "filesystemType", "filesystemName", "folderPrefix", "wasbContainerName", "sshCommand", "sshUser", "sshChecker" })
public void testFileSystem(String filesystemType, String filesystemName, String folderPrefix, @Optional("it-container") String wasbContainerName,
        String sshCommand, @Optional("cloudbreak") String sshUser, String sshChecker) throws IOException {
    // GIVEN
    Assert.assertTrue(new File(defaultPrivateKeyFile).exists(), "Private key file not found: " + defaultPrivateKeyFile);
    fsParams.put("filesystemType", filesystemType);
    fsParams.put("filesystemName", filesystemName);
    fsParams.put("folderPrefix", folderPrefix);
    fsParams.put("wasbContainerName", wasbContainerName);
    IntegrationTestContext itContext = getItContext();
    String stackId = itContext.getContextParam(CloudbreakITContextConstants.STACK_ID);
    StackV1Endpoint stackV1Endpoint = itContext.getContextParam(CloudbreakITContextConstants.CLOUDBREAK_CLIENT, CloudbreakClient.class).stackV1Endpoint();
    String masterIp = CloudbreakUtil.getAmbariIp(stackV1Endpoint, stackId, itContext);
    Map<String, String> cloudProviderParams = itContext.getContextParam(CloudbreakITContextConstants.CLOUDPROVIDER_PARAMETERS, Map.class);
    sshCommand = ResourceUtil.readStringFromResource(applicationContext, sshCommand.replaceAll("\n", ""));
    if ("WASB".equals(filesystemType)) {
        FilesystemUtil.createWasbContainer(cloudProviderParams, filesystemName, wasbContainerName);
    }
    // WHEN
    boolean sshResult = SshUtil.executeCommand(masterIp, defaultPrivateKeyFile, sshCommand, sshUser, SshUtil.getSshCheckMap(sshChecker));
    // THEN
    Assert.assertTrue(sshResult, "SSH command execution was not successful");
}
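The WHEN step delegates to SshUtil.executeCommand, which runs the prepared command on the master node with the default private key and evaluates the output against the configured checker. A minimal sketch of that kind of SSH check, written with plain JSch; it illustrates the idea only and is not the project's SshUtil implementation (the checker here is reduced to a simple substring match):

import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import com.jcraft.jsch.ChannelExec;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;

// Illustrative sketch: run a command over SSH with a private key and check
// the result, similar in spirit to SshUtil.executeCommand above.
public class SshCommandSketch {

    static boolean runAndCheck(String host, String user, String privateKeyFile,
            String command, String expectedOutput) throws Exception {
        JSch jsch = new JSch();
        jsch.addIdentity(privateKeyFile);
        Session session = jsch.getSession(user, host, 22);
        session.setConfig("StrictHostKeyChecking", "no"); // integration-test convenience only
        session.connect();
        try {
            ChannelExec channel = (ChannelExec) session.openChannel("exec");
            channel.setCommand(command);
            InputStream out = channel.getInputStream();
            channel.connect();
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            byte[] chunk = new byte[4096];
            int read;
            while ((read = out.read(chunk)) != -1) {
                buffer.write(chunk, 0, read);
            }
            channel.disconnect();
            String output = new String(buffer.toByteArray(), StandardCharsets.UTF_8);
            // the real test maps a checker name (sshChecker) to a richer predicate
            return channel.getExitStatus() == 0 && output.contains(expectedOutput);
        } finally {
            session.disconnect();
        }
    }
}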
Use of com.sequenceiq.it.IntegrationTestContext in project cloudbreak by hortonworks.
Example: class FilesystemTest, method cleanUpFilesystem.
@AfterTest
public void cleanUpFilesystem() throws Exception {
    IntegrationTestContext itContext = getItContext();
    Map<String, String> cloudProviderParams = itContext.getContextParam(CloudbreakITContextConstants.CLOUDPROVIDER_PARAMETERS, Map.class);
    FilesystemUtil.cleanUpFiles(applicationContext, cloudProviderParams, fsParams.get("filesystemType"), fsParams.get("filesystemName"),
            fsParams.get("folderPrefix"), fsParams.get("wasbContainerName"));
}
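The cleanup reads the same fsParams map that testFileSystem populated, so the @AfterTest hook only knows what to delete if the test ran far enough to record it. A minimal sketch of that share-then-clean-up lifecycle, with stand-in names rather than the real FilesystemTest members:

import java.util.HashMap;
import java.util.Map;

import org.testng.annotations.AfterTest;
import org.testng.annotations.Test;

// Sketch of the lifecycle above: the test records what it created in a shared
// field and the @AfterTest hook removes it afterwards. Names are illustrative.
public class FilesystemLifecycleSketch {

    private final Map<String, String> fsParams = new HashMap<>();

    @Test
    public void createsRemoteFolders() {
        fsParams.put("filesystemType", "WASB");
        fsParams.put("folderPrefix", "it-folder");
        // ... exercise the cluster file system here ...
    }

    @AfterTest
    public void cleanUp() {
        if (fsParams.isEmpty()) {
            return; // the test never ran, or failed before creating anything
        }
        // dispatch on fsParams.get("filesystemType") and delete what was created
        fsParams.clear();
    }
}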
Use of com.sequenceiq.it.IntegrationTestContext in project cloudbreak by hortonworks.
Example: class AwsCheckSpotInstance, method setContextParams.
@BeforeMethod
public void setContextParams() {
    IntegrationTestContext itContext = getItContext();
    // TestNG's Assert.assertNotNull takes the object first and the message second
    Assert.assertNotNull(itContext.getContextParam(CloudbreakITContextConstants.STACK_ID), "Stack id is mandatory.");
}
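These tests repeatedly call getContextParam(key) and getContextParam(key, Class) to pull shared state out of IntegrationTestContext. A minimal, map-backed sketch of that typed lookup; it is illustrative only and not the project's real context class:

import java.util.HashMap;
import java.util.Map;

// Map-backed sketch of the typed context lookup used throughout these tests.
// Method names mirror the calls above but the class itself is a stand-in.
public class ContextSketch {

    private final Map<String, Object> params = new HashMap<>();

    public void putContextParam(String key, Object value) {
        params.put(key, value);
    }

    public String getContextParam(String key) {
        Object value = params.get(key);
        return value == null ? null : value.toString();
    }

    public <T> T getContextParam(String key, Class<T> type) {
        return type.cast(params.get(key));
    }

    public static void main(String[] args) {
        ContextSketch ctx = new ContextSketch();
        ctx.putContextParam("STACK_ID", "42");
        ctx.putContextParam("CLOUDPROVIDER_PARAMETERS", new HashMap<String, String>());
        String stackId = ctx.getContextParam("STACK_ID");
        Map<?, ?> providerParams = ctx.getContextParam("CLOUDPROVIDER_PARAMETERS", Map.class);
        System.out.println(stackId + " " + providerParams);
    }
}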
Use of com.sequenceiq.it.IntegrationTestContext in project cloudbreak by hortonworks.
Example: class AwsCheckSpotInstance, method checkSpotInstance.
@Parameters({ "region", "hostGroupToCheck", "scalingAdjustment" })
@Test
public void checkSpotInstance(Regions region, String hostGroupToCheck, @Optional Integer scalingAdjustment) {
// GIVEN
Integer spotInstanceCount = 0;
IntegrationTestContext itContext = getItContext();
String stackId = itContext.getContextParam(CloudbreakITContextConstants.STACK_ID);
StackV1Endpoint stackV1Endpoint = getCloudbreakClient().stackV1Endpoint();
StackResponse stackResponse = stackV1Endpoint.get(Long.valueOf(stackId), new HashSet<>());
List<InstanceGroupResponse> instanceGroups = stackResponse.getInstanceGroups();
Collection<String> instanceIdList = new ArrayList<>();
List<String> hostGroupList = Arrays.asList(hostGroupToCheck.split(","));
for (InstanceGroupResponse instanceGroup : instanceGroups) {
if (hostGroupList.contains(instanceGroup.getGroup())) {
Set<InstanceMetaDataJson> instanceMetaData = instanceGroup.getMetadata();
for (InstanceMetaDataJson metaData : instanceMetaData) {
instanceIdList.add(metaData.getInstanceId());
}
}
}
// WHEN
AmazonEC2 ec2 = AmazonEC2ClientBuilder.standard().withRegion(region).build();
DescribeSpotInstanceRequestsResult describeSpotInstanceRequestsResult = ec2.describeSpotInstanceRequests();
List<SpotInstanceRequest> spotInstanceRequests = describeSpotInstanceRequestsResult.getSpotInstanceRequests();
// THEN
Assert.assertFalse(spotInstanceRequests.isEmpty());
Collection<String> spotInstanceIdList = new ArrayList<>();
for (SpotInstanceRequest request : spotInstanceRequests) {
spotInstanceIdList.add(request.getInstanceId());
}
for (String id : instanceIdList) {
Assert.assertTrue(spotInstanceIdList.contains(id));
if (spotInstanceIdList.contains(id)) {
spotInstanceCount += 1;
}
}
if (scalingAdjustment != null) {
Assert.assertNotNull(itContext.getContextParam(CloudbreakITContextConstants.INSTANCE_COUNT, List.class));
Integer instanceCountPrev = 0;
for (String hostGroup : hostGroupList) {
List<Map<String, Integer>> instanceList = itContext.getContextParam(CloudbreakITContextConstants.INSTANCE_COUNT, List.class);
Assert.assertTrue(instanceList.size() >= 2);
instanceCountPrev += instanceList.get(instanceList.size() - 2).get(hostGroup);
}
Assert.assertEquals(Integer.valueOf(instanceCountPrev + scalingAdjustment), spotInstanceCount);
}
}
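The WHEN step lists every spot instance request in the account and region and then cross-checks the ids in memory. A narrower variant would ask EC2 only for the requests that fulfilled the stack's instances. The sketch below is an illustration of that alternative, not the test's actual code; the "instance-id" filter name follows the EC2 DescribeSpotInstanceRequests documentation:

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;

import com.amazonaws.regions.Regions;
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.AmazonEC2ClientBuilder;
import com.amazonaws.services.ec2.model.DescribeSpotInstanceRequestsRequest;
import com.amazonaws.services.ec2.model.Filter;
import com.amazonaws.services.ec2.model.SpotInstanceRequest;

// Sketch: fetch only the spot requests belonging to the given instance ids,
// instead of describing every spot request in the account.
public class SpotRequestLookupSketch {

    static List<String> spotInstanceIds(Regions region, Collection<String> instanceIds) {
        AmazonEC2 ec2 = AmazonEC2ClientBuilder.standard().withRegion(region).build();
        DescribeSpotInstanceRequestsRequest request = new DescribeSpotInstanceRequestsRequest()
                .withFilters(new Filter("instance-id", new ArrayList<>(instanceIds)));
        return ec2.describeSpotInstanceRequests(request).getSpotInstanceRequests().stream()
                .map(SpotInstanceRequest::getInstanceId)
                .collect(Collectors.toList());
    }
}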
Use of com.sequenceiq.it.IntegrationTestContext in project cloudbreak by hortonworks.
Example: class SshKeysTests, method setup.
@BeforeTest
public void setup() throws Exception {
    given(CloudbreakClient.isCreated());
    given(cloudProvider.aValidCredential());
    IntegrationTestContext it = getItContext();
    credentialId = Credential.getTestContextCredential().apply(it).getResponse().getId();
}
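Here Credential.getTestContextCredential() returns something that is applied to the shared IntegrationTestContext, i.e. a lookup expressed as a function of the context rather than a direct field access. A small, self-contained sketch of that accessor style, with a plain Map standing in for the real context and all names assumed for illustration:

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

// Illustrative sketch of the apply(it) accessor style above: a "given" step
// stores a value in the shared context, and later steps resolve it lazily by
// applying a Function to that context. The Map and key are stand-ins.
public class CredentialAccessorSketch {

    static Function<Map<String, Object>, Long> credentialId() {
        return ctx -> (Long) ctx.get("CREDENTIAL_ID");
    }

    public static void main(String[] args) {
        Map<String, Object> it = new HashMap<>();
        it.put("CREDENTIAL_ID", 42L);                  // stored by a setup step
        Long credentialId = credentialId().apply(it);  // resolved later, like apply(it)
        System.out.println("credentialId = " + credentialId);
    }
}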