Use of com.sequenceiq.datalake.flow.SdxContext in project cloudbreak by hortonworks: class SdxCreateActions, method rdsCreation().
@Bean(name = "SDX_CREATION_WAIT_RDS_STATE")
public Action<?, ?> rdsCreation() {
return new AbstractSdxAction<>(StorageValidationSuccessEvent.class) {
@Override
protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, StorageValidationSuccessEvent payload) {
return SdxContext.from(flowParameters, payload);
}
@Override
protected void doExecute(SdxContext context, StorageValidationSuccessEvent payload, Map<Object, Object> variables) throws Exception {
RdsWaitRequest req = new RdsWaitRequest(context);
sendEvent(context, req.selector(), req);
}
@Override
protected Object getFailurePayload(StorageValidationSuccessEvent payload, Optional<SdxContext> flowContext, Exception ex) {
return SdxCreateFailedEvent.from(payload, ex);
}
};
}
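The action above builds its flow context with SdxContext.from(flowParameters, payload), and every other creation action on this page does the same. Purely as an illustration of that factory pattern (the real SdxContext extends the flow framework's common context and carries more state; the accessor names beyond sdxId and userId are assumptions), a stripped-down sketch could look like this:

// Illustrative sketch only: a context assembled from the flow parameters plus the
// identifiers carried by the incoming event. Not the actual
// com.sequenceiq.datalake.flow.SdxContext API.
public class SdxContextSketch {

    private final Object flowParameters;

    private final Long sdxId;

    private final String userId;

    public SdxContextSketch(Object flowParameters, Long sdxId, String userId) {
        this.flowParameters = flowParameters;
        this.sdxId = sdxId;
        this.userId = userId;
    }

    // Mirrors the shape of the SdxContext.from(flowParameters, payload) calls above,
    // with the event payload represented by its two identifiers.
    public static SdxContextSketch from(Object flowParameters, Long resourceId, String userId) {
        return new SdxContextSketch(flowParameters, resourceId, userId);
    }

    public Long getSdxId() {
        return sdxId;
    }

    public String getUserId() {
        return userId;
    }

    public Object getFlowParameters() {
        return flowParameters;
    }
}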
Class SdxCreateActions, method envWaitInProgress().
@Bean(name = "SDX_CREATION_WAIT_ENV_STATE")
public Action<?, ?> envWaitInProgress() {
return new AbstractSdxAction<>(RdsWaitSuccessEvent.class) {
@Override
protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, RdsWaitSuccessEvent payload) {
return SdxContext.from(flowParameters, payload);
}
@Override
protected void doExecute(SdxContext context, RdsWaitSuccessEvent payload, Map<Object, Object> variables) throws Exception {
EnvWaitRequest req = EnvWaitRequest.from(context);
sendEvent(context, req.selector(), req);
}
@Override
protected Object getFailurePayload(RdsWaitSuccessEvent payload, Optional<SdxContext> flowContext, Exception ex) {
return SdxCreateFailedEvent.from(payload, ex);
}
};
}
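Both doExecute methods so far hand their request to sendEvent(context, req.selector(), req), so routing is keyed on a selector string rather than on the payload type. A minimal, self-contained sketch of that dispatch idea, assuming the common cloudbreak convention that a selector is derived from the event class name (the real flow runs on a reactor-based event bus and is more involved):

import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;

// Minimal sketch of selector-keyed dispatch; not the actual cloudbreak event bus.
public class SelectorDispatchSketch {

    private final Map<String, Consumer<Object>> handlers = new HashMap<>();

    // A handler subscribes for exactly one selector string.
    public void register(String selector, Consumer<Object> handler) {
        handlers.put(selector, handler);
    }

    // Counterpart of sendEvent(context, req.selector(), req): look up the handler by selector.
    public void send(String selector, Object payload) {
        Consumer<Object> handler = handlers.get(selector);
        if (handler != null) {
            handler.accept(payload);
        }
    }

    // Assumed convention: the selector is the upper-cased simple class name of the event.
    public static String selectorOf(Class<?> eventClass) {
        return eventClass.getSimpleName().toUpperCase();
    }
}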
Class SdxCreateActions, method failedAction().
@Bean(name = "SDX_CREATION_FAILED_STATE")
public Action<?, ?> failedAction() {
return new AbstractSdxAction<>(SdxCreateFailedEvent.class) {
@Override
protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, SdxCreateFailedEvent payload) {
return SdxContext.from(flowParameters, payload);
}
@Override
protected void doExecute(SdxContext context, SdxCreateFailedEvent payload, Map<Object, Object> variables) throws Exception {
Exception exception = payload.getException();
String statusReason = "Datalake creation failed";
String errorMessage = webApplicationExceptionMessageExtractor.getErrorMessage(exception);
if (StringUtils.hasText(errorMessage)) {
statusReason = statusReason + ". " + errorMessage;
} else if (exception.getMessage() != null) {
statusReason = statusReason + ". " + exception.getMessage();
}
LOGGER.error(statusReason, exception);
try {
SdxCluster sdxCluster = sdxStatusService.setStatusForDatalakeAndNotify(DatalakeStatusEnum.PROVISIONING_FAILED, statusReason, payload.getResourceId());
metricService.incrementMetricCounter(MetricType.SDX_CREATION_FAILED, sdxCluster);
eventSenderService.notifyEvent(context, ResourceEvent.SDX_CLUSTER_CREATION_FAILED);
} catch (NotFoundException notFoundException) {
LOGGER.info("Can not set status to SDX_CREATION_FAILED because data lake was not found");
} catch (DatalakeStatusUpdateException datalakeStatusUpdateException) {
LOGGER.info("Status update for data lake failed (possible reason: ongoing parallel deletion flow): {}", exception.getMessage());
}
Flow flow = getFlow(context.getFlowParameters().getFlowId());
flow.setFlowFailed(payload.getException());
sendEvent(context, SDX_CREATE_FAILED_HANDLED_EVENT.event(), payload);
}
@Override
protected Object getFailurePayload(SdxCreateFailedEvent payload, Optional<SdxContext> flowContext, Exception ex) {
return null;
}
};
}
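The status reason assembled in doExecute prefers the message produced by webApplicationExceptionMessageExtractor and only falls back to the raw exception message when nothing was extracted. The same logic, pulled out as a plain static helper for illustration (the blank check stands in for Spring's StringUtils.hasText):

// Standalone restatement of the status-reason fallback in failedAction above, for illustration.
final class StatusReasonSketch {

    private StatusReasonSketch() {
    }

    static String buildStatusReason(String extractedErrorMessage, Exception exception) {
        String statusReason = "Datalake creation failed";
        if (extractedErrorMessage != null && !extractedErrorMessage.trim().isEmpty()) {
            // Prefer the extracted error message when one is present.
            return statusReason + ". " + extractedErrorMessage;
        }
        if (exception.getMessage() != null) {
            // Otherwise fall back to the exception's own message, if any.
            return statusReason + ". " + exception.getMessage();
        }
        return statusReason;
    }
}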
Class SdxCreateActions, method sdxValidation().
@Bean(name = "SDX_CREATION_VALIDATION_STATE")
public Action<?, ?> sdxValidation() {
return new AbstractSdxAction<>(SdxEvent.class) {
@Override
protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, SdxEvent payload) {
return SdxContext.from(flowParameters, payload);
}
@Override
protected void doExecute(SdxContext context, SdxEvent payload, Map<Object, Object> variables) throws Exception {
setCorrectSdxIdIfNecessary(context, payload);
eventSenderService.notifyEvent(context, ResourceEvent.SDX_CLUSTER_PROVISION_STARTED);
SdxValidationRequest req = new SdxValidationRequest(context);
sendEvent(context, req.selector(), req);
}
@Override
protected Object getFailurePayload(SdxEvent payload, Optional<SdxContext> flowContext, Exception ex) {
return SdxCreateFailedEvent.from(payload, ex);
}
};
}
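Both SdxValidationRequest and RdsWaitRequest above are constructed directly from the flow context. A hypothetical sketch of that constructor pattern, assuming the request only needs the data lake id and user id out of the context (the real request classes in cloudbreak may carry additional fields):

// Hypothetical request payload built from the flow context; not the actual
// SdxValidationRequest or RdsWaitRequest API.
public class ContextBackedRequestSketch {

    private final Long sdxId;

    private final String userId;

    // Counterpart of `new SdxValidationRequest(context)`: copy the identifiers the handler
    // will need so the payload is self-contained once it is on the event bus.
    public ContextBackedRequestSketch(Long contextSdxId, String contextUserId) {
        this.sdxId = contextSdxId;
        this.userId = contextUserId;
    }

    public Long getResourceId() {
        return sdxId;
    }

    public String getUserId() {
        return userId;
    }

    public String selector() {
        // Assumed convention, as in the dispatch sketch earlier: upper-cased simple class name.
        return getClass().getSimpleName().toUpperCase();
    }
}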
Class SdxCreateActions, method finishedAction().
@Bean(name = "SDX_CREATION_FINISHED_STATE")
public Action<?, ?> finishedAction() {
return new AbstractSdxAction<>(StackCreationSuccessEvent.class) {
@Override
protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, StackCreationSuccessEvent payload) {
return SdxContext.from(flowParameters, payload);
}
@Override
protected void doExecute(SdxContext context, StackCreationSuccessEvent payload, Map<Object, Object> variables) throws Exception {
SdxCluster sdxCluster = sdxStatusService.setStatusForDatalakeAndNotify(DatalakeStatusEnum.RUNNING, "Datalake is running", payload.getResourceId());
metricService.incrementMetricCounter(MetricType.SDX_CREATION_FINISHED, sdxCluster);
jobService.schedule(context.getSdxId(), SdxClusterJobAdapter.class);
eventSenderService.notifyEvent(context, ResourceEvent.SDX_CLUSTER_PROVISION_FINISHED);
sendEvent(context, SDX_CREATE_FINALIZED_EVENT.event(), payload);
}
@Override
protected Object getFailurePayload(StackCreationSuccessEvent payload, Optional<SdxContext> flowContext, Exception ex) {
return null;
}
};
}
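Taken together, the payload type each action consumes implies the order in which the creation flow moves through the states shown here. Purely as a reading aid (the authoritative wiring lives in the project's flow configuration class, which also defines additional steps such as the storage validation and stack creation whose success events appear above):

// Reading aid only: ordering of the states in this section, inferred from the payload
// each action consumes and the event or request it emits.
public enum SdxCreateStateOrderSketch {
    SDX_CREATION_VALIDATION_STATE,   // consumes SdxEvent, emits SdxValidationRequest
    SDX_CREATION_WAIT_RDS_STATE,     // consumes StorageValidationSuccessEvent, emits RdsWaitRequest
    SDX_CREATION_WAIT_ENV_STATE,     // consumes RdsWaitSuccessEvent, emits EnvWaitRequest
    SDX_CREATION_FINISHED_STATE,     // consumes StackCreationSuccessEvent, emits SDX_CREATE_FINALIZED_EVENT
    SDX_CREATION_FAILED_STATE        // consumes SdxCreateFailedEvent, emits SDX_CREATE_FAILED_HANDLED_EVENT
}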