Usage example of com.sequenceiq.datalake.flow.SdxContext in the cloudbreak project by Hortonworks.
From class SdxStopActions, method failedAction:
/**
 * Spring bean backing the SDX_STOP_FAILED_STATE of the SDX stop flow.
 * <p>
 * On entry it marks the flow as failed, derives a human-readable status reason
 * from the failure payload, detects whether the stop was part of a datalake
 * resize flow chain (to enable proper recovery), persists the STOP_FAILED
 * status, and finally emits SDX_STOP_FAILED_HANDLED_EVENT to advance the flow.
 *
 * @return the failure-handling {@link Action} for the SDX stop state machine
 */
@Bean(name = "SDX_STOP_FAILED_STATE")
public Action<?, ?> failedAction() {
    return new AbstractSdxAction<>(SdxFailedEvent.class) {
        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, SdxFailedEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void doExecute(SdxContext context, SdxFailedEvent payload, Map<Object, Object> variables) throws Exception {
            Exception exception = payload.getException();
            DatalakeStatusEnum failedStatus = DatalakeStatusEnum.STOP_FAILED;
            LOGGER.info("Update SDX status to {} for resource: {}", failedStatus, payload.getResourceId(), exception);
            // Prefer the exception's own message as the status reason when available.
            String statusReason = "SDX stop failed";
            if (exception.getMessage() != null) {
                statusReason = exception.getMessage();
            }
            Flow flow = getFlow(context.getFlowParameters().getFlowId());
            // Fix: reuse the already-captured exception instead of re-fetching it
            // from the payload, keeping a single source of truth in this method.
            flow.setFlowFailed(exception);
            // If this is part of a DL resize, mark the failure as such in order to enable proper recovery.
            Optional<FlowLog> lastFlowLog = flowLogService.getLastFlowLog(context.getFlowParameters().getFlowId());
            if (lastFlowLog.isPresent()) {
                Optional<FlowChainLog> flowChainLog = flowChainLogService.findFirstByFlowChainIdOrderByCreatedDesc(lastFlowLog.get().getFlowChainId());
                if (flowChainLog.isPresent() && flowChainLog.get().getFlowChainType().equals(DatalakeResizeFlowEventChainFactory.class.getSimpleName())) {
                    statusReason = "Datalake resize failure: " + statusReason;
                }
            }
            eventSenderService.notifyEvent(context, ResourceEvent.SDX_STOP_FAILED);
            sdxStatusService.setStatusForDatalakeAndNotify(failedStatus, statusReason, payload.getResourceId());
            sendEvent(context, SDX_STOP_FAILED_HANDLED_EVENT.event(), payload);
        }

        @Override
        protected Object getFailurePayload(SdxFailedEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            // Terminal failure state: there is no further failure transition to take.
            return null;
        }
    };
}
Usage example of com.sequenceiq.datalake.flow.SdxContext in the cloudbreak project by Hortonworks.
From class SdxStartActions, method finishedAction:
/**
 * Spring bean backing the SDX_START_FINISHED_STATE of the SDX start flow.
 * <p>
 * Persists the RUNNING status for the datalake, bumps the start-finished
 * metric, and emits SDX_START_FINALIZED_EVENT to complete the flow.
 *
 * @return the finalizing {@link Action} for the SDX start state machine
 */
@Bean(name = "SDX_START_FINISHED_STATE")
public Action<?, ?> finishedAction() {
    return new AbstractSdxAction<>(SdxStartSuccessEvent.class) {
        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, SdxStartSuccessEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void doExecute(SdxContext context, SdxStartSuccessEvent payload, Map<Object, Object> variables) throws Exception {
            Long resourceId = payload.getResourceId();
            LOGGER.info("SDX start finalized: {}", resourceId);
            // Record the RUNNING state and notify listeners before finalizing the flow.
            SdxCluster cluster = sdxStatusService.setStatusForDatalakeAndNotify(DatalakeStatusEnum.RUNNING, "Datalake is running", resourceId);
            metricService.incrementMetricCounter(MetricType.SDX_START_FINISHED, cluster);
            sendEvent(context, SDX_START_FINALIZED_EVENT.event(), payload);
        }

        @Override
        protected Object getFailurePayload(SdxStartSuccessEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            // Success-finalization state: no failure transition is defined.
            return null;
        }
    };
}
Usage example of com.sequenceiq.datalake.flow.SdxContext in the cloudbreak project by Hortonworks.
From class SdxStartActions, method failedAction:
/**
 * Spring bean backing the SDX_START_FAILED_STATE of the SDX start flow.
 * <p>
 * Derives a status reason from the failure payload, persists the START_FAILED
 * status, bumps the start-failed metric, and emits
 * SDX_START_FAILED_HANDLED_EVENT to advance the flow.
 *
 * @return the failure-handling {@link Action} for the SDX start state machine
 */
@Bean(name = "SDX_START_FAILED_STATE")
public Action<?, ?> failedAction() {
    return new AbstractSdxAction<>(SdxFailedEvent.class) {
        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, SdxFailedEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void doExecute(SdxContext context, SdxFailedEvent payload, Map<Object, Object> variables) throws Exception {
            Exception failure = payload.getException();
            DatalakeStatusEnum targetStatus = DatalakeStatusEnum.START_FAILED;
            LOGGER.info("Update SDX status to {} for resource: {}", targetStatus, payload.getResourceId(), failure);
            // Use the exception's message when it carries one; otherwise fall back to a generic reason.
            String reason = failure.getMessage() != null ? failure.getMessage() : "SDX start failed";
            SdxCluster cluster = sdxStatusService.setStatusForDatalakeAndNotify(targetStatus, reason, payload.getResourceId());
            metricService.incrementMetricCounter(MetricType.SDX_START_FAILED, cluster);
            sendEvent(context, SDX_START_FAILED_HANDLED_EVENT.event(), payload);
        }

        @Override
        protected Object getFailurePayload(SdxFailedEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            // Terminal failure state: no further failure transition exists.
            return null;
        }
    };
}
Usage example of com.sequenceiq.datalake.flow.SdxContext in the cloudbreak project by Hortonworks.
From class UpgradeCcmActions, method upgradeCcmStack:
/**
 * Spring bean backing the UPGRADE_CCM_UPGRADE_STACK_STATE of the CCM upgrade flow.
 * <p>
 * Builds an {@link UpgradeCcmStackRequest} from the flow context and dispatches
 * it to trigger the stack-level CCM upgrade for the SDX cluster.
 *
 * @return the stack-upgrade {@link Action} for the CCM upgrade state machine
 */
@Bean(name = "UPGRADE_CCM_UPGRADE_STACK_STATE")
public Action<?, ?> upgradeCcmStack() {
    return new AbstractUpgradeCcmSdxAction<>(UpgradeCcmStackEvent.class) {
        @Override
        protected void doExecute(SdxContext context, UpgradeCcmStackEvent payload, Map<Object, Object> variables) {
            LOGGER.info("Execute CCM upgrade stack flow for SDX: {}", payload.getResourceId());
            // Hand the upgrade request off to the event bus; the handler performs the actual work.
            sendEvent(context, UpgradeCcmStackRequest.from(context));
        }

        @Override
        protected Object getFailurePayload(UpgradeCcmStackEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            // Route any failure into the CCM-upgrade failure state with full context.
            return UpgradeCcmFailedEvent.from(payload, ex);
        }
    };
}
Usage example of com.sequenceiq.datalake.flow.SdxContext in the cloudbreak project by Hortonworks.
From class UpgradeCcmActions, method failedAction:
/**
 * Spring bean backing the UPGRADE_CCM_FAILED_STATE of the CCM upgrade flow.
 * <p>
 * Derives a status reason from the failure payload, persists the
 * DATALAKE_UPGRADE_CCM_FAILED status, bumps the upgrade-failed metric, and
 * emits UPGRADE_CCM_FAILED_HANDLED_EVENT to advance the flow.
 *
 * @return the failure-handling {@link Action} for the CCM upgrade state machine
 */
@Bean(name = "UPGRADE_CCM_FAILED_STATE")
public Action<?, ?> failedAction() {
    return new AbstractUpgradeCcmSdxAction<>(UpgradeCcmFailedEvent.class) {
        @Override
        protected void doExecute(SdxContext context, UpgradeCcmFailedEvent payload, Map<Object, Object> variables) {
            Exception failure = payload.getException();
            DatalakeStatusEnum targetStatus = DatalakeStatusEnum.DATALAKE_UPGRADE_CCM_FAILED;
            LOGGER.info("Update SDX status to {} for resource: {}", targetStatus, payload.getResourceId(), failure);
            // Use the exception's message when present; otherwise fall back to a generic reason.
            String reason = failure.getMessage() != null ? failure.getMessage() : "Cluster Connectivity Manager upgrade failed";
            SdxCluster cluster = sdxStatusService.setStatusForDatalakeAndNotify(targetStatus, reason, payload.getResourceId());
            metricService.incrementMetricCounter(MetricType.UPGRADE_CCM_FAILED, cluster);
            sendEvent(context, UPGRADE_CCM_FAILED_HANDLED_EVENT.event(), payload);
        }
    };
}
Aggregations