Example use of io.cdap.cdap.proto.RunRecord in project cdap by caskdata.
From the class SupportBundlePipelineInfoTaskTest, method generateWorkflowLog.
/**
 * Deploys {@link AppWithWorkflow}, records a run of its sample workflow as started and
 * running, then marks the run completed after a simulated one-minute execution.
 *
 * @return the pid (run id) of the workflow run that was generated
 * @throws Exception if deployment, run-state transitions, or HTTP calls fail
 */
private String generateWorkflowLog() throws Exception {
  deploy(AppWithWorkflow.class, 200, Constants.Gateway.API_VERSION_3_TOKEN, NAMESPACE.getNamespace());

  long now = System.currentTimeMillis();
  ProgramId workflow = new ProgramId(NAMESPACE.getNamespace(), AppWithWorkflow.NAME,
                                     ProgramType.WORKFLOW, AppWithWorkflow.SampleWorkflow.NAME);
  RunId runId = RunIds.generate(now);
  ArtifactId artifactId = NAMESPACE.getNamespaceId().artifact("testArtifact", "1.0").toApiArtifactId();
  setStartAndRunning(workflow, runId.getId(), artifactId);

  // Exactly one run should be visible in the RUNNING state.
  List<RunRecord> runningRuns = getProgramRuns(workflow, ProgramRunStatus.RUNNING);
  Assert.assertEquals(1, runningRuns.size());

  // Sanity-check that the apps endpoint answers for this namespace.
  HttpResponse appsResponse =
      doGet(getVersionedAPIPath("apps/", Constants.Gateway.API_VERSION_3_TOKEN, NAMESPACE.getNamespace()));
  Assert.assertEquals(200, appsResponse.getResponseCode());

  // workflow ran for 1 minute
  long stopTimeSeconds = TimeUnit.MILLISECONDS.toSeconds(now) + 60;
  store.setStop(workflow.run(runId.getId()), stopTimeSeconds, ProgramRunStatus.COMPLETED,
                SupportBundleTestHelper.createSourceId(++sourceId));
  return runningRuns.get(0).getPid();
}
Example use of io.cdap.cdap.proto.RunRecord in project cdap by caskdata.
From the class AuthorizationTest, method assertProgramFailure.
/**
 * Starts the given program and waits until its run history contains only FAILED runs,
 * with exactly one more failure than before the start.
 *
 * @param programArgs runtime arguments to start the program with
 * @param programManager manager for the program expected to fail
 * @throws TimeoutException if the expected failure is not observed within five minutes
 */
private <T extends ProgramManager> void assertProgramFailure(Map<String, String> programArgs, final ProgramManager<T> programManager) throws TimeoutException, InterruptedException, ExecutionException {
  final int failuresBefore = programManager.getHistory(ProgramRunStatus.FAILED).size();
  programManager.start(programArgs);
  // need to check that every run has failed as well as the number of failures
  // otherwise there is a race where start() returns before any run record is written
  // and this check passes because there are existing failed runs, but the new run has not failed.
  Tasks.waitFor(true, () -> {
    // verify program history just have failures, and there is one more failure than before program start
    List<RunRecord> history = programManager.getHistory();
    boolean allFailed = history.stream().allMatch(run -> run.getStatus() == ProgramRunStatus.FAILED);
    return allFailed && history.size() == failuresBefore + 1;
  }, 5, TimeUnit.MINUTES, "Not all program runs have failed status. Expected all run status to be failed");
  programManager.waitForStopped(10, TimeUnit.SECONDS);
}
Example use of io.cdap.cdap.proto.RunRecord in project cdap by caskdata.
From the class TestFrameworkTestRun, method testServiceManager.
/**
 * Verifies {@link ServiceManager} instance management and run-history filtering:
 * requested vs. provisioned instances before/after start, and that history queries
 * filtered by {@link ProgramRunStatus} return only matching records.
 */
@Test
public void testServiceManager() throws Exception {
  ApplicationManager applicationManager = deployApplication(AppWithServices.class);
  final ServiceManager serviceManager = applicationManager.getServiceManager(AppWithServices.SERVICE_NAME);
  serviceManager.setInstances(2);
  // Before start: instances are requested but not yet provisioned, and there is no history.
  Assert.assertEquals(0, serviceManager.getProvisionedInstances());
  Assert.assertEquals(2, serviceManager.getRequestedInstances());
  Assert.assertFalse(serviceManager.isRunning());
  List<RunRecord> history = serviceManager.getHistory();
  Assert.assertEquals(0, history.size());
  serviceManager.start();
  serviceManager.waitForRun(ProgramRunStatus.RUNNING, 10, TimeUnit.SECONDS);
  Assert.assertEquals(2, serviceManager.getProvisionedInstances());
  // requesting with ProgramRunStatus.KILLED returns empty list
  history = serviceManager.getHistory(ProgramRunStatus.KILLED);
  Assert.assertEquals(0, history.size());
  // requesting with either RUNNING or ALL will return one record
  // (lambda instead of anonymous Callable, consistent with the rest of this class)
  Tasks.waitFor(1, () -> serviceManager.getHistory(ProgramRunStatus.RUNNING).size(),
                5, TimeUnit.SECONDS);
  history = serviceManager.getHistory(ProgramRunStatus.RUNNING);
  Assert.assertEquals(ProgramRunStatus.RUNNING, history.get(0).getStatus());
  history = serviceManager.getHistory(ProgramRunStatus.ALL);
  Assert.assertEquals(1, history.size());
  Assert.assertEquals(ProgramRunStatus.RUNNING, history.get(0).getStatus());
}
Example use of io.cdap.cdap.proto.RunRecord in project cdap by caskdata.
From the class TestFrameworkTestRun, method testWorkflowCondition.
/**
 * Runs ConditionalWorkflow with the configurable condition enabled and verifies that each
 * workflow node (both condition nodes and the chosen action) recorded the expected
 * user-scope workflow-token entries.
 */
@Test
public void testWorkflowCondition() throws Exception {
  ApplicationManager applicationManager = deployApplication(testSpace, ConditionalWorkflowApp.class);
  final WorkflowManager wfmanager = applicationManager.getWorkflowManager("ConditionalWorkflow");
  wfmanager.start(ImmutableMap.of("configurable.condition", "true"));
  // Wait for the single run to complete.
  // (lambda instead of anonymous Callable, consistent with the rest of this class)
  Tasks.waitFor(true, () -> wfmanager.getHistory(ProgramRunStatus.COMPLETED).size() == 1,
                30, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
  List<RunRecord> history = wfmanager.getHistory();
  String pid = history.get(0).getPid();
  // The configurable condition should have run its full lifecycle: initialize, apply, destroy.
  WorkflowTokenNodeDetail tokenNodeDetail = wfmanager.getTokenAtNode(pid, "MyConfigurableCondition", WorkflowToken.Scope.USER, null);
  Map<String, String> expected = ImmutableMap.of("configurable.condition.initialize", "true", "configurable.condition.destroy", "true", "configurable.condition.apply", "true");
  Assert.assertEquals(expected, tokenNodeDetail.getTokenDataAtNode());
  // The simple condition only records its initialize step.
  tokenNodeDetail = wfmanager.getTokenAtNode(pid, "SimpleCondition", WorkflowToken.Scope.USER, null);
  expected = ImmutableMap.of("simple.condition.initialize", "true");
  Assert.assertEquals(expected, tokenNodeDetail.getTokenDataAtNode());
  // action2 is the branch taken when the configurable condition evaluates to true.
  tokenNodeDetail = wfmanager.getTokenAtNode(pid, "action2", WorkflowToken.Scope.USER, null);
  expected = ImmutableMap.of("action.name", "action2");
  Assert.assertEquals(expected, tokenNodeDetail.getTokenDataAtNode());
}
Example use of io.cdap.cdap.proto.RunRecord in project cdap by caskdata.
From the class TestFrameworkTestRun, method testAppWithPlugin.
/**
 * End-to-end test of plugin usage across program types: deploys an app artifact plus a
 * plugin artifact, then verifies the plugin works from a Worker, a Service, a Workflow
 * (including a workflow-scoped metric), and a Spark program that processes FileSet input
 * into a Table.
 */
@Test
public void testAppWithPlugin() throws Exception {
  // Deploy the app artifact and a plugin artifact that extends it.
  ArtifactId artifactId = NamespaceId.DEFAULT.artifact("app-with-plugin", "1.0.0-SNAPSHOT");
  addAppArtifact(artifactId, AppWithPlugin.class);
  ArtifactId pluginArtifactId = NamespaceId.DEFAULT.artifact("test-plugin", "1.0.0-SNAPSHOT");
  addPluginArtifact(pluginArtifactId, artifactId, ToStringPlugin.class);
  ApplicationId appId = NamespaceId.DEFAULT.app("AppWithPlugin");
  // Parameterized AppRequest<?> instead of the raw type (avoids an unchecked warning).
  AppRequest<?> createRequest = new AppRequest<>(new ArtifactSummary(artifactId.getArtifact(), artifactId.getVersion()));
  ApplicationManager appManager = deployApplication(appId, createRequest);

  // Worker: runs once to completion using the plugin.
  final WorkerManager workerManager = appManager.getWorkerManager(AppWithPlugin.WORKER);
  workerManager.start();
  workerManager.waitForRun(ProgramRunStatus.COMPLETED, 10, TimeUnit.SECONDS);

  // Service: start, hit an endpoint that exercises the plugin, then stop.
  final ServiceManager serviceManager = appManager.getServiceManager(AppWithPlugin.SERVICE);
  serviceManager.start();
  serviceManager.waitForRun(ProgramRunStatus.RUNNING, 10, TimeUnit.SECONDS);
  URL serviceURL = serviceManager.getServiceURL(5, TimeUnit.SECONDS);
  callServiceGet(serviceURL, "dummy");
  serviceManager.stop();
  serviceManager.waitForStopped(10, TimeUnit.SECONDS);

  // Workflow: run to completion and confirm it wrote the plugin result and emitted
  // the user-scoped destroy metric tagged with this run id.
  WorkflowManager workflowManager = appManager.getWorkflowManager(AppWithPlugin.WORKFLOW);
  workflowManager.start();
  workflowManager.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);
  List<RunRecord> runRecords = workflowManager.getHistory();
  Assert.assertNotEquals(ProgramRunStatus.FAILED, runRecords.get(0).getStatus());
  DataSetManager<KeyValueTable> workflowTableManager = getDataset(AppWithPlugin.WORKFLOW_TABLE);
  String value = Bytes.toString(workflowTableManager.get().read("val"));
  Assert.assertEquals(AppWithPlugin.TEST, value);
  Map<String, String> workflowTags = ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, NamespaceId.DEFAULT.getNamespace(), Constants.Metrics.Tag.APP, "AppWithPlugin", Constants.Metrics.Tag.WORKFLOW, AppWithPlugin.WORKFLOW, Constants.Metrics.Tag.RUN_ID, runRecords.get(0).getPid());
  getMetricsManager().waitForTotalMetricCount(workflowTags, String.format("user.destroy.%s", AppWithPlugin.WORKFLOW), 1, 60, TimeUnit.SECONDS);

  // Testing Spark Plugins. First send some data to fileset for the Spark program to process
  DataSetManager<FileSet> fileSetManager = getDataset(AppWithPlugin.SPARK_INPUT);
  FileSet fileSet = fileSetManager.get();
  try (PrintStream out = new PrintStream(fileSet.getLocation("input").append("file.txt").getOutputStream(), true, "UTF-8")) {
    for (int i = 0; i < 5; i++) {
      out.println("Message " + i);
    }
  }
  Map<String, String> sparkArgs = new HashMap<>();
  FileSetArguments.setInputPath(sparkArgs, "input");
  SparkManager sparkManager = appManager.getSparkManager(AppWithPlugin.SPARK).start(sparkArgs);
  sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 2, TimeUnit.MINUTES);

  // Verify the Spark result.
  DataSetManager<Table> dataSetManager = getDataset(AppWithPlugin.SPARK_TABLE);
  Table table = dataSetManager.get();
  try (Scanner scanner = table.scan(null, null)) {
    for (int i = 0; i < 5; i++) {
      Row row = scanner.next();
      Assert.assertNotNull(row);
      String expected = "Message " + i + " " + AppWithPlugin.TEST;
      Assert.assertEquals(expected, Bytes.toString(row.getRow()));
      Assert.assertEquals(expected, Bytes.toString(row.get(expected)));
    }
    // There shouldn't be any more rows in the table.
    Assert.assertNull(scanner.next());
  }
}
Aggregations