Use of io.cdap.cdap.proto.BasicThrowable in project cdap by caskdata.
From the class RunRecordCorrectorServiceTest, method testFixUnderlyingProgramInWorkflow.
@Test
public void testFixUnderlyingProgramInWorkflow() throws Exception {
  AtomicInteger sourceId = new AtomicInteger(0);
  ArtifactId artifactId = NamespaceId.DEFAULT.artifact("testArtifact", "1.0").toApiArtifactId();
  // Set up a workflow.
  Map<String, String> wfSystemArg = ImmutableMap.of(
      ProgramOptionConstants.CLUSTER_MODE, ClusterMode.ISOLATED.name(),
      SystemArguments.PROFILE_NAME, ProfileId.NATIVE.getScopedName());
  ProgramRunId wfId = NamespaceId.DEFAULT.app("test").workflow("testWF").run(randomRunId());
  store.setProvisioning(wfId, Collections.emptyMap(), wfSystemArg,
                        Bytes.toBytes(sourceId.getAndIncrement()), artifactId);
  store.setProvisioned(wfId, 0, Bytes.toBytes(sourceId.getAndIncrement()));
  store.setStart(wfId, null, Collections.emptyMap(), Bytes.toBytes(sourceId.getAndIncrement()));
  store.setRunning(wfId, System.currentTimeMillis(), null, Bytes.toBytes(sourceId.getAndIncrement()));
  // Set up a Spark program inside the workflow, in ISOLATED mode.
  Map<String, String> spSystemArgs = ImmutableMap.of(
      ProgramOptionConstants.CLUSTER_MODE, ClusterMode.ISOLATED.name(),
      SystemArguments.PROFILE_NAME, ProfileId.NATIVE.getScopedName(),
      ProgramOptionConstants.WORKFLOW_NAME, wfId.getProgram(),
      ProgramOptionConstants.WORKFLOW_RUN_ID, wfId.getRun(),
      ProgramOptionConstants.WORKFLOW_NODE_ID, "spark");
  ProgramRunId spId = NamespaceId.DEFAULT.app("test").spark("phase-1").run(randomRunId());
  store.setProvisioning(spId, Collections.emptyMap(), spSystemArgs,
                        Bytes.toBytes(sourceId.getAndIncrement()), artifactId);
  store.setProvisioned(spId, 0, Bytes.toBytes(sourceId.getAndIncrement()));
  store.setStart(spId, null, Collections.emptyMap(), Bytes.toBytes(sourceId.getAndIncrement()));
  store.setRunning(spId, System.currentTimeMillis(), null, Bytes.toBytes(sourceId.getAndIncrement()));
  // Mark the workflow as finished, leaving the Spark run record dangling in RUNNING state.
  store.setStop(wfId, System.currentTimeMillis(), ProgramRunStatus.COMPLETED,
                Bytes.toBytes(sourceId.getAndIncrement()));
  ProgramStateWriter programStateWriter = new NoOpProgramStateWriter() {
    @Override
    public void error(ProgramRunId programRunId, Throwable failureCause) {
      store.setStop(programRunId, System.currentTimeMillis(), ProgramRunStatus.FAILED,
                    new BasicThrowable(failureCause), Bytes.toBytes(sourceId.getAndIncrement()));
    }
  };
  ProgramRuntimeService noOpRuntimeService =
      new AbstractProgramRuntimeService(cConf, null, new NoOpProgramStateWriter(), null, false) {
        @Override
        public ProgramLiveInfo getLiveInfo(ProgramId programId) {
          return new NotRunningProgramLiveInfo(programId);
        }

        @Override
        public Map<RunId, RuntimeInfo> list(ProgramId program) {
          return Collections.emptyMap();
        }
      };
  // Create a run record fixer with a start buffer time of -1 so that it fixes right away.
  RunRecordCorrectorService fixer = new RunRecordCorrectorService(
      cConf, store, programStateWriter, noOpRuntimeService, namespaceAdmin, datasetFramework, -1L, 5) {
  };
  fixer.fixRunRecords();
  // Check that the run record is fixed for the Spark program.
  Assert.assertEquals(ProgramRunStatus.FAILED, store.getRun(spId).getStatus());
}
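The BasicThrowable usage here is in the overridden error() callback: when the corrector marks a dangling run as failed, the Throwable is persisted in its serialization-friendly BasicThrowable form. A minimal sketch of that conversion, using only the constructor and accessors exercised elsewhere in these examples (the variable names and message are illustrative, not from the test above):

// Sketch: BasicThrowable captures the class name, message, stack frames,
// and cause chain of a Throwable in a serializable form.
Throwable failure = new IllegalStateException("program run no longer exists");
BasicThrowable recorded = new BasicThrowable(failure);
String className = recorded.getClassName();          // "java.lang.IllegalStateException"
String message = recorded.getMessage();              // "program run no longer exists"
StackTraceElement[] frames = recorded.getStackTraces();
BasicThrowable cause = recorded.getCause();          // null here; otherwise wrapped recursively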
Use of io.cdap.cdap.proto.BasicThrowable in project cdap by caskdata.
From the class DefaultStoreTest, method testWorkflowNodeState.
@Test
public void testWorkflowNodeState() {
  String namespaceName = "namespace1";
  String appName = "app1";
  String workflowName = "workflow1";
  String mapReduceName = "mapReduce1";
  String sparkName = "spark1";
  ApplicationId appId = Ids.namespace(namespaceName).app(appName);
  ProgramId mapReduceProgram = appId.mr(mapReduceName);
  ProgramId sparkProgram = appId.spark(sparkName);
  long currentTime = System.currentTimeMillis();
  String workflowRunId = RunIds.generate(currentTime).getId();
  ProgramRunId workflowRun = appId.workflow(workflowName).run(workflowRunId);
  ArtifactId artifactId = appId.getParent().artifact("testArtifact", "1.0").toApiArtifactId();
  // Start the workflow.
  setStartAndRunning(workflowRun, artifactId);
  // Start MapReduce as a part of the workflow.
  Map<String, String> systemArgs = ImmutableMap.of(
      ProgramOptionConstants.WORKFLOW_NODE_ID, mapReduceName,
      ProgramOptionConstants.WORKFLOW_NAME, workflowName,
      ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId);
  RunId mapReduceRunId = RunIds.generate(currentTime + 10);
  setStartAndRunning(mapReduceProgram.run(mapReduceRunId.getId()), ImmutableMap.of(), systemArgs, artifactId);
  // Stop the MapReduce program.
  store.setStop(mapReduceProgram.run(mapReduceRunId.getId()), currentTime + 50,
                ProgramRunStatus.COMPLETED, AppFabricTestHelper.createSourceId(++sourceId));
  // Start the Spark program as a part of the workflow.
  systemArgs = ImmutableMap.of(
      ProgramOptionConstants.WORKFLOW_NODE_ID, sparkName,
      ProgramOptionConstants.WORKFLOW_NAME, workflowName,
      ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId);
  RunId sparkRunId = RunIds.generate(currentTime + 60);
  setStartAndRunning(sparkProgram.run(sparkRunId.getId()), ImmutableMap.of(), systemArgs, artifactId);
  // Stop the Spark program with a failure.
  NullPointerException npe = new NullPointerException("dataset not found");
  IllegalArgumentException iae = new IllegalArgumentException("illegal argument", npe);
  store.setStop(sparkProgram.run(sparkRunId.getId()), currentTime + 100,
                ProgramRunStatus.FAILED, new BasicThrowable(iae), AppFabricTestHelper.createSourceId(++sourceId));
  // Stop the workflow.
  store.setStop(workflowRun, currentTime + 110, ProgramRunStatus.FAILED,
                AppFabricTestHelper.createSourceId(++sourceId));
  List<WorkflowNodeStateDetail> nodeStateDetails = store.getWorkflowNodeStates(workflowRun);
  Map<String, WorkflowNodeStateDetail> workflowNodeStates = new HashMap<>();
  for (WorkflowNodeStateDetail nodeStateDetail : nodeStateDetails) {
    workflowNodeStates.put(nodeStateDetail.getNodeId(), nodeStateDetail);
  }
  Assert.assertEquals(2, workflowNodeStates.size());
  WorkflowNodeStateDetail nodeStateDetail = workflowNodeStates.get(mapReduceName);
  Assert.assertEquals(mapReduceName, nodeStateDetail.getNodeId());
  Assert.assertEquals(NodeStatus.COMPLETED, nodeStateDetail.getNodeStatus());
  Assert.assertEquals(mapReduceRunId.getId(), nodeStateDetail.getRunId());
  Assert.assertNull(nodeStateDetail.getFailureCause());
  nodeStateDetail = workflowNodeStates.get(sparkName);
  Assert.assertEquals(sparkName, nodeStateDetail.getNodeId());
  Assert.assertEquals(NodeStatus.FAILED, nodeStateDetail.getNodeStatus());
  Assert.assertEquals(sparkRunId.getId(), nodeStateDetail.getRunId());
  BasicThrowable failureCause = nodeStateDetail.getFailureCause();
  Assert.assertNotNull(failureCause);
  Assert.assertEquals("illegal argument", failureCause.getMessage());
  Assert.assertEquals(IllegalArgumentException.class.getName(), failureCause.getClassName());
  failureCause = failureCause.getCause();
  Assert.assertNotNull(failureCause);
  Assert.assertEquals("dataset not found", failureCause.getMessage());
  Assert.assertEquals(NullPointerException.class.getName(), failureCause.getClassName());
  Assert.assertNull(failureCause.getCause());
}
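The assertions above walk the preserved cause chain by hand; the same traversal can be written as a loop. A small sketch, assuming only the getCause(), getClassName(), and getMessage() accessors asserted above:

// Sketch: print the full recorded failure chain, outermost cause first.
for (BasicThrowable t = nodeStateDetail.getFailureCause(); t != null; t = t.getCause()) {
  System.out.println(t.getClassName() + ": " + t.getMessage());
}
// java.lang.IllegalArgumentException: illegal argument
// java.lang.NullPointerException: dataset not found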
Use of io.cdap.cdap.proto.BasicThrowable in project cdap by caskdata.
From the class TaskWorkerServiceTest, method testStartAndStopWithInvalidRequest.
@Test
public void testStartAndStopWithInvalidRequest() throws Exception {
  InetSocketAddress addr = taskWorkerService.getBindAddress();
  URI uri = URI.create(String.format("http://%s:%s", addr.getHostName(), addr.getPort()));
  // Post an invalid request: the named task class does not exist.
  RunnableTaskRequest noClassReq = RunnableTaskRequest.getBuilder("NoClass").build();
  String reqBody = GSON.toJson(noClassReq);
  HttpResponse response = HttpRequests.execute(
      HttpRequest.post(uri.resolve("/v3Internal/worker/run").toURL()).withBody(reqBody).build(),
      new DefaultHttpRequestConfig(false));
  Assert.assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponseCode());
  BasicThrowable basicThrowable = GSON.fromJson(response.getResponseBodyAsString(), BasicThrowable.class);
  Assert.assertTrue(basicThrowable.getClassName().contains("java.lang.ClassNotFoundException"));
  Assert.assertNotNull(basicThrowable.getMessage());
  Assert.assertTrue(basicThrowable.getMessage().contains("NoClass"));
  Assert.assertNotEquals(0, basicThrowable.getStackTraces().length);
}
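The response body in this test is a JSON-serialized BasicThrowable. A hedged round-trip sketch of how such a body can be produced and consumed with Gson; this is illustrative, not the actual TaskWorker handler code:

// Sketch: serialize a failure on the server side, deserialize it on the client.
BasicThrowable serverSide = new BasicThrowable(new ClassNotFoundException("NoClass"));
String body = GSON.toJson(serverSide);
BasicThrowable clientSide = GSON.fromJson(body, BasicThrowable.class);
// clientSide.getClassName() -> "java.lang.ClassNotFoundException"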
Use of io.cdap.cdap.proto.BasicThrowable in project cdap by caskdata.
From the class RemoteExecutionException, method fromBasicThrowable.
/**
 * Converts a {@link BasicThrowable} to a {@link RemoteExecutionException}.
 *
 * @param basicThrowable the serialized form of the remote exception
 * @return an exception which retains the local stack trace
 */
public static RemoteExecutionException fromBasicThrowable(BasicThrowable basicThrowable) {
  BasicThrowable cause = basicThrowable.getCause();
  Exception causeException = cause == null ? null : fromBasicThrowable(cause);
  RemoteTaskException remoteTaskException = new RemoteTaskException(
      basicThrowable.getClassName(), basicThrowable.getMessage(), causeException);
  remoteTaskException.setStackTrace(basicThrowable.getStackTraces());
  // Wrap the remote exception as the cause so that we retain the local stack trace of the exception.
  return new RemoteExecutionException(remoteTaskException);
}
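A possible call site, sketched under the assumption that a BasicThrowable has already been deserialized from a remote response; deserializeFailure() is a hypothetical helper, not part of the snippet above:

// Sketch: rehydrate a remote failure into a throwable carrying both stack traces.
BasicThrowable remoteFailure = deserializeFailure();  // hypothetical helper
RemoteExecutionException e = RemoteExecutionException.fromBasicThrowable(remoteFailure);
// e.getCause() is a RemoteTaskException carrying the remote stack trace;
// e itself carries the local stack trace from this call site.
throw e;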