Example use of com.sequenceiq.flow.core.Flow in the Cloudbreak project by Hortonworks:
the backupFailedAction method of the FullBackupActions class.
@Bean(name = "BACKUP_FAILED_STATE")
public Action<?, ?> backupFailedAction() {
    // Terminal failure state of the full-backup flow: marks the flow failed,
    // fails the tracked operation (when one was registered), then emits the
    // failure-handled event so the state machine can finish.
    return new AbstractBackupAction<>(StackFailureEvent.class) {

        @Inject
        private OperationService operationService;

        @Override
        protected void doExecute(BackupContext context, StackFailureEvent payload, Map<Object, Object> variables) {
            LOGGER.error("Full backup failed", payload.getException());
            markFlowAsFailed(context, payload);
            if (isOperationIdSet(variables)) {
                LOGGER.debug("Fail operation with id: [{}]", getOperationId(variables));
                operationService.failOperation(context.getStack().getAccountId(), getOperationId(variables), payload.getException().getMessage());
            }
            sendEvent(context, new StackEvent(FULL_BACKUP_FAILURE_HANDLED_EVENT.event(), payload.getResourceId()));
        }

        // Flag the currently running flow as failed with the payload's exception.
        private void markFlowAsFailed(BackupContext context, StackFailureEvent payload) {
            getFlow(context.getFlowParameters().getFlowId()).setFlowFailed(payload.getException());
        }
    };
}
Example use of com.sequenceiq.flow.core.Flow in the Cloudbreak project by Hortonworks:
the failedAction method of the StartDatahubActions class.
@Bean(name = "START_DATAHUB_FAILED_STATE")
public Action<?, ?> failedAction() {
    // Terminal failure state for the start-datahub flow: records the failure on
    // the flow, moves the datalake to START_FAILED, and emits the handled event.
    return new AbstractSdxAction<>(StartDatahubFailedEvent.class) {
        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, StartDatahubFailedEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void doExecute(SdxContext context, StartDatahubFailedEvent payload, Map<Object, Object> variables) throws Exception {
            Exception exception = payload.getException();
            DatalakeStatusEnum failedStatus = DatalakeStatusEnum.START_FAILED;
            // error level: this is a failure path, consistent with the other failure handlers.
            LOGGER.error("Update SDX status to {} for resource: {}", failedStatus, payload.getResourceId(), exception);
            // Fall back to a generic reason when the exception carries no message.
            String statusReason = exception.getMessage() != null ? exception.getMessage() : "Datahub start failed";
            Flow flow = getFlow(context.getFlowParameters().getFlowId());
            // Reuse the already-extracted exception instead of re-fetching it from the payload.
            flow.setFlowFailed(exception);
            sdxStatusService.setStatusForDatalakeAndNotify(failedStatus, statusReason, payload.getResourceId());
            sendEvent(context, START_DATAHUB_HANDLED_EVENT.event(), payload);
        }

        @Override
        protected Object getFailurePayload(StartDatahubFailedEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            // This state already handles the failure; no further failure payload is produced.
            return null;
        }
    };
}
Example use of com.sequenceiq.flow.core.Flow in the Cloudbreak project by Hortonworks:
the databaseRestoreFailed method of the DatalakeRestoreActions class.
@Bean(name = "DATALAKE_DATABASE_RESTORE_FAILED_STATE")
public Action<?, ?> databaseRestoreFailed() {
    // Terminal failure state for the datalake database restore flow: persists
    // the FAILED status on the operation entry, marks the flow failed, and
    // emits the failure-handled event.
    return new AbstractSdxAction<>(DatalakeDatabaseRestoreFailedEvent.class) {
        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, DatalakeDatabaseRestoreFailedEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void doExecute(SdxContext context, DatalakeDatabaseRestoreFailedEvent payload, Map<Object, Object> variables) {
            Exception exception = payload.getException();
            LOGGER.error("Datalake database restore could not be started for datalake with id: {}", payload.getResourceId(), exception);
            // The operation id was stored in the flow variables by an earlier state.
            String operationId = (String) variables.get(OPERATION_ID);
            sdxBackupRestoreService.updateDatabaseStatusEntry(operationId, SdxOperationStatus.FAILED, exception.getLocalizedMessage());
            Flow flow = getFlow(context.getFlowParameters().getFlowId());
            // Reuse the already-extracted exception instead of re-fetching it from the payload.
            flow.setFlowFailed(exception);
            sendEvent(context, DATALAKE_DATABASE_RESTORE_FAILURE_HANDLED_EVENT.event(), payload);
        }

        @Override
        protected Object getFailurePayload(DatalakeDatabaseRestoreFailedEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            return DatalakeDatabaseRestoreFailedEvent.from(payload, ex);
        }
    };
}
Example use of com.sequenceiq.flow.core.Flow in the Cloudbreak project by Hortonworks:
the backupFailed method of the DatalakeBackupActions class.
@Bean(name = "DATALAKE_BACKUP_FAILED_STATE")
public Action<?, ?> backupFailed() {
    // Terminal failure state for the datalake backup flow: restores the RUNNING
    // status with a failure reason, bumps the failure metric, marks the flow
    // failed, notifies the user, and emits the failure-handled event.
    return new AbstractSdxAction<>(DatalakeBackupFailedEvent.class) {
        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, DatalakeBackupFailedEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void doExecute(SdxContext context, DatalakeBackupFailedEvent payload, Map<Object, Object> variables) {
            Exception exception = payload.getException();
            LOGGER.error("Datalake backup failed for datalake with id: {}", payload.getResourceId(), exception);
            SdxCluster sdxCluster = sdxStatusService.setStatusForDatalakeAndNotify(DatalakeStatusEnum.RUNNING, ResourceEvent.DATALAKE_BACKUP_FAILED, getFailureReason(variables, exception), payload.getResourceId());
            metricService.incrementMetricCounter(MetricType.SDX_BACKUP_FAILED, sdxCluster);
            Flow flow = getFlow(context.getFlowParameters().getFlowId());
            flow.setFlowFailed(exception);
            // List.of is null-hostile: guard against exceptions with a null message,
            // otherwise the failure handler itself would throw an NPE here.
            String failureMessage = exception.getMessage() != null ? exception.getMessage() : exception.toString();
            eventSenderService.sendEventAndNotification(sdxCluster, context.getFlowTriggerUserCrn(), ResourceEvent.DATALAKE_BACKUP_FAILED, List.of(failureMessage));
            sendEvent(context, DATALAKE_BACKUP_FAILURE_HANDLED_EVENT.event(), payload);
        }

        @Override
        protected Object getFailurePayload(DatalakeBackupFailedEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            return DatalakeDatabaseBackupFailedEvent.from(payload, ex);
        }
    };
}
Example use of com.sequenceiq.flow.core.Flow in the Cloudbreak project by Hortonworks:
the createFlowContext method of the AbstractStackFailureAction class.
@Override
protected StackFailureContext createFlowContext(FlowParameters flowParameters, StateContext<S, E> stateContext, StackFailureEvent payload) {
    // Load the stack for MDC logging context, mark the active flow as failed
    // with the payload's exception, and build the failure context for the state machine.
    Stack stack = stackService.getStackById(payload.getResourceId());
    MDCBuilder.buildMdcContext(stack);
    getFlow(flowParameters.getFlowId()).setFlowFailed(payload.getException());
    return new StackFailureContext(flowParameters, stack);
}
Aggregations