Example usage of com.sequenceiq.datalake.flow.SdxContext in the Cloudbreak project by Hortonworks:
the DatalakeBackupActions class, method backupCouldNotStart.
/**
 * Flow state entered when the datalake database backup cannot be started.
 * Logs the cause, marks the tracked backup operation as FAILED, notifies the
 * cluster owner, and transitions to the failure-handled event.
 */
@Bean(name = "DATALAKE_DATABASE_BACKUP_COULD_NOT_START_STATE")
public Action<?, ?> backupCouldNotStart() {
    return new AbstractSdxAction<>(DatalakeDatabaseBackupCouldNotStartEvent.class) {
        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext,
                DatalakeDatabaseBackupCouldNotStartEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void doExecute(SdxContext context, DatalakeDatabaseBackupCouldNotStartEvent payload, Map<Object, Object> variables) {
            Exception exception = payload.getException();
            LOGGER.error("Datalake database backup could not be started for datalake with id: {}", payload.getResourceId(), exception);
            String operationId = (String) variables.get(OPERATION_ID);
            // Reuse the already-extracted exception instead of calling payload.getException() a second time.
            sdxBackupRestoreService.updateDatabaseStatusEntry(operationId, SdxOperationStatus.FAILED, exception.getMessage());
            SdxCluster sdxCluster = sdxService.getById(payload.getResourceId());
            eventSenderService.sendEventAndNotification(sdxCluster, context.getFlowTriggerUserCrn(), ResourceEvent.DATALAKE_DATABASE_BACKUP_FAILED);
            sendEvent(context, DATALAKE_DATABASE_BACKUP_FAILURE_HANDLED_EVENT.event(), payload);
        }

        @Override
        protected Object getFailurePayload(DatalakeDatabaseBackupCouldNotStartEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            return DatalakeDatabaseBackupFailedEvent.from(payload, ex);
        }
    };
}
Example usage of com.sequenceiq.datalake.flow.SdxContext in the Cloudbreak project by Hortonworks:
the SdxDetachActions class, method sdxDetachExternalDBFailedAction.
/**
 * Flow state entered when detaching the external database of an SDX cluster fails.
 * Unless the detach was part of a recovery flow, the cluster and its stack are
 * rolled back to the attached state before the failure event is propagated.
 */
@Bean(name = "SDX_DETACH_EXTERNAL_DB_FAILED_STATE")
public Action<?, ?> sdxDetachExternalDBFailedAction() {
    return new AbstractSdxAction<>(SdxDetachFailedEvent.class) {
        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext,
                SdxDetachFailedEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void doExecute(SdxContext context, SdxDetachFailedEvent payload, Map<Object, Object> variables) {
            boolean detachDuringRecovery = (boolean) variables.get(IS_DETACH_DURING_RECOVERY);
            if (!detachDuringRecovery) {
                SdxCluster detached = (SdxCluster) variables.get(DETACHED_SDX);
                LOGGER.error("Failed to detach external DB of SDX with ID: {}. Attempting to restore it.", detached.getId());
                // Capture the pre-reattach name: reattachStack needs it after reattachCluster has updated the entity.
                String detachedName = detached.getClusterName();
                SdxCluster restored = sdxAttachService.reattachCluster(detached);
                sdxAttachService.reattachStack(restored, detachedName);
                LOGGER.info("Successfully restored detached SDX with ID {} which failed to detach its external database.", restored.getId());
            }
            sendEvent(context, SDX_DETACH_FAILED_EVENT.event(), payload);
        }

        @Override
        protected Object getFailurePayload(SdxDetachFailedEvent payload, Optional<SdxContext> flowContext, Exception e) {
            LOGGER.error("Failed to reattach SDX with ID {} which failed to detach its external database.", payload.getResourceId());
            return payload;
        }
    };
}
Example usage of com.sequenceiq.datalake.flow.SdxContext in the Cloudbreak project by Hortonworks:
the SdxDetachActions class, method sdxDetachStackFailedAction.
/**
 * Flow state entered when detaching the stack of an SDX cluster fails.
 * Unless the detach was part of a recovery flow, the cluster is reattached
 * before the failure event is propagated.
 */
@Bean(name = "SDX_DETACH_STACK_FAILED_STATE")
public Action<?, ?> sdxDetachStackFailedAction() {
    return new AbstractSdxAction<>(SdxDetachFailedEvent.class) {
        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext,
                SdxDetachFailedEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void doExecute(SdxContext context, SdxDetachFailedEvent payload, Map<Object, Object> variables) {
            boolean detachDuringRecovery = (boolean) variables.get(IS_DETACH_DURING_RECOVERY);
            if (!detachDuringRecovery) {
                SdxCluster detachedCluster = (SdxCluster) variables.get(DETACHED_SDX);
                LOGGER.error("Failed to detach stack of SDX with ID: {}. Attempting to restore it.", detachedCluster.getId());
                SdxCluster restoredCluster = sdxAttachService.reattachCluster(detachedCluster);
                LOGGER.info("Successfully restored detached SDX with ID {} which failed to detach its stack.", restoredCluster.getId());
            }
            sendEvent(context, SDX_DETACH_FAILED_EVENT.event(), payload);
        }

        @Override
        protected Object getFailurePayload(SdxDetachFailedEvent payload, Optional<SdxContext> flowContext, Exception e) {
            LOGGER.error("Failed to reattach SDX with ID {} which failed to detach its stack.", payload.getResourceId());
            return payload;
        }
    };
}
Example usage of com.sequenceiq.datalake.flow.SdxContext in the Cloudbreak project by Hortonworks:
the SdxDetachRecoveryActions class, method sdxDetachRecoveryAction.
/**
 * Flow state that recovers from a failed SDX detach by reattaching the
 * detached cluster, then signals success. Failures are wrapped into a
 * {@code SdxDetachRecoveryFailedEvent}.
 */
@Bean(name = "SDX_DETACH_RECOVERY_STATE")
public Action<?, ?> sdxDetachRecoveryAction() {
    return new AbstractSdxAction<>(SdxStartDetachRecoveryEvent.class) {
        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext,
                SdxStartDetachRecoveryEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void doExecute(SdxContext context, SdxStartDetachRecoveryEvent payload, Map<Object, Object> variables) throws Exception {
            SdxCluster detachedCluster = sdxService.getById(payload.getResourceId());
            SdxCluster recoveredCluster = sdxAttachService.reattachDetachedSdxCluster(detachedCluster);
            LOGGER.info("Successfully restored detached SDX with ID {}.", recoveredCluster.getId());
            sendEvent(context, SDX_DETACH_RECOVERY_SUCCESS_EVENT.event(), payload);
        }

        @Override
        protected Object getFailurePayload(SdxStartDetachRecoveryEvent payload, Optional<SdxContext> flowContext, Exception e) {
            LOGGER.error("Failed to recover from detach of SDX with ID {}.", payload.getResourceId());
            return SdxDetachRecoveryFailedEvent.from(payload, e);
        }
    };
}
Example usage of com.sequenceiq.datalake.flow.SdxContext in the Cloudbreak project by Hortonworks:
the DatalakeBackupActions class, method datalakeBackupInProgress.
/**
 * Flow state entered while the datalake database backup is running.
 * Marks the tracked operation as in progress, updates the datalake status with
 * a notification, records a backup-requested metric, and starts the wait for
 * backup completion.
 */
@Bean(name = "DATALAKE_DATABASE_BACKUP_IN_PROGRESS_STATE")
public Action<?, ?> datalakeBackupInProgress() {
    return new AbstractSdxAction<>(SdxEvent.class) {
        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext,
                SdxEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void doExecute(SdxContext context, SdxEvent payload, Map<Object, Object> variables) {
            LOGGER.info("Datalake database backup is in progress for {} ", payload.getResourceId());
            String operationId = (String) variables.get(OPERATION_ID);
            // Mark the tracked database operation as running.
            sdxBackupRestoreService.updateDatabaseStatusEntry(operationId, SdxOperationStatus.INPROGRESS, null);
            // Flip the datalake status and notify the owner, then count the backup request.
            SdxCluster cluster = sdxStatusService.setStatusForDatalakeAndNotify(
                    DatalakeStatusEnum.DATALAKE_BACKUP_INPROGRESS,
                    ResourceEvent.DATALAKE_BACKUP_IN_PROGRESS,
                    "Datalake backup in progress",
                    payload.getResourceId());
            metricService.incrementMetricCounter(MetricType.SDX_BACKUP_REQUESTED, cluster);
            sendEvent(context, DatalakeDatabaseBackupWaitRequest.from(context, operationId));
        }

        @Override
        protected Object getFailurePayload(SdxEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            return DatalakeDatabaseBackupFailedEvent.from(payload, ex);
        }
    };
}
Aggregations