Usage example of com.sequenceiq.datalake.flow.SdxContext in the Cloudbreak project by Hortonworks: class DatalakeBackupActions, method datalakeBackup.
@Bean(name = "DATALAKE_DATABASE_BACKUP_START_STATE")
public Action<?, ?> datalakeBackup() {
    // Flow action that kicks off a datalake database backup and reports progress.
    return new AbstractSdxAction<>(DatalakeDatabaseBackupStartEvent.class) {
        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext,
                DatalakeDatabaseBackupStartEvent payload) {
            // Derive the flow context straight from the triggering payload.
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void prepareExecution(DatalakeDatabaseBackupStartEvent payload, Map<Object, Object> variables) {
            super.prepareExecution(payload, variables);
            // Seed flow variables exactly once so later states can reuse them without overwriting.
            if (!variables.containsKey(BACKUP_ID)) {
                variables.put(BACKUP_ID, payload.getBackupRequest().getBackupId());
            }
            if (!variables.containsKey(OPERATION_ID)) {
                variables.put(OPERATION_ID, payload.getDrStatus().getOperationId());
            }
        }

        @Override
        protected void doExecute(SdxContext context, DatalakeDatabaseBackupStartEvent payload, Map<Object, Object> variables) {
            LOGGER.info("Datalake database backup has been started for {}", payload.getResourceId());
            SdxCluster cluster = sdxService.getById(payload.getResourceId());
            // Notify the user that the backup started, then launch the actual database backup
            // and move the flow into the in-progress state.
            eventSenderService.sendEventAndNotification(cluster, context.getFlowTriggerUserCrn(), ResourceEvent.DATALAKE_DATABASE_BACKUP);
            sdxBackupRestoreService.databaseBackup(payload.getDrStatus(), payload.getResourceId(), payload.getBackupRequest());
            sendEvent(context, DATALAKE_DATABASE_BACKUP_IN_PROGRESS_EVENT.event(), payload);
        }

        @Override
        protected Object getFailurePayload(DatalakeDatabaseBackupStartEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            // Any failure here is translated into a "could not start" event for the failure state.
            return DatalakeDatabaseBackupCouldNotStartEvent.from(payload, ex);
        }
    };
}
Usage example of com.sequenceiq.datalake.flow.SdxContext in the Cloudbreak project by Hortonworks: class SdxDetachActions, method sdxAttachNewClusterFailedAction.
@Bean(name = "SDX_ATTACH_NEW_CLUSTER_FAILED_STATE")
public Action<?, ?> sdxAttachNewClusterFailedAction() {
    // Recovery action: when attaching the new cluster fails, re-attach the previously
    // detached SDX cluster so the environment is left in a usable state.
    return new AbstractSdxAction<>(SdxDetachFailedEvent.class) {
        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext,
                SdxDetachFailedEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void doExecute(SdxContext context, SdxDetachFailedEvent payload, Map<Object, Object> variables) throws Exception {
            // DETACHED_SDX was stored by the detach state; restore that cluster.
            SdxCluster clusterToReattach = (SdxCluster) variables.get(DETACHED_SDX);
            clusterToReattach = sdxAttachService.reattachDetachedSdxCluster(clusterToReattach);
            LOGGER.info("Successfully restored detached SDX with ID {}.", clusterToReattach.getId());
            sendEvent(context, SDX_DETACH_FAILED_EVENT.event(), payload);
        }

        @Override
        protected Object getFailurePayload(SdxDetachFailedEvent payload, Optional<SdxContext> flowContext, Exception e) {
            // Pass the exception as the last SLF4J argument so the stack trace is logged;
            // the original call dropped it and lost the failure cause.
            LOGGER.error("Failed to recover from detach of SDX with ID {}.", payload.getResourceId(), e);
            return payload;
        }
    };
}
Usage example of com.sequenceiq.datalake.flow.SdxContext in the Cloudbreak project by Hortonworks: class SdxDetachActions, method sdxDetachCluster.
@Bean(name = "SDX_DETACH_CLUSTER_STATE")
public Action<?, ?> sdxDetachCluster() {
    // Flow action that detaches an SDX cluster and records the state the
    // follow-up attach/recovery states will need.
    return new AbstractSdxAction<>(SdxStartDetachEvent.class) {
        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext,
                SdxStartDetachEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void doExecute(SdxContext context, SdxStartDetachEvent payload, Map<Object, Object> variables) {
            LOGGER.info("Detaching of SDX with ID {} in progress.", payload.getResourceId());
            SdxCluster detachedCluster = sdxDetachService.detachCluster(payload.getResourceId(), payload.isDetachDuringRecovery());
            // Stash everything the later states read back out of the variables map.
            variables.put(DETACHED_SDX, detachedCluster);
            variables.put(RESIZED_SDX, payload.getSdxCluster());
            variables.put(IS_DETACH_DURING_RECOVERY, payload.isDetachDuringRecovery());
            // Tell the user the detach started, then advance the flow.
            eventSenderService.sendEventAndNotification(detachedCluster, context.getFlowTriggerUserCrn(), ResourceEvent.SDX_DETACH_STARTED, List.of(detachedCluster.getClusterName()));
            sendEvent(context, SDX_DETACH_CLUSTER_SUCCESS_EVENT.event(), payload);
        }

        @Override
        protected Object getFailurePayload(SdxStartDetachEvent payload, Optional<SdxContext> flowContext, Exception e) {
            return SdxDetachFailedEvent.from(payload, e);
        }
    };
}
Usage example of com.sequenceiq.datalake.flow.SdxContext in the Cloudbreak project by Hortonworks: class SdxDetachActions, method sdxAttachNewCluster.
// Flow action that finalizes the detach of the old SDX cluster and, unless the
// detach is part of a recovery, attaches the new (resized) cluster in its place.
// NOTE(review): statement order matters here — markAsDetached must precede the
// attach, and setSdxId must run after the resized cluster is saved.
@Bean(name = "SDX_ATTACH_NEW_CLUSTER_STATE")
public Action<?, ?> sdxAttachNewCluster() {
return new AbstractSdxAction<>(SdxEvent.class) {
@Override
protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, SdxEvent payload) {
// Build the flow context directly from the incoming event.
return SdxContext.from(flowParameters, payload);
}
@Override
protected void doExecute(SdxContext context, SdxEvent payload, Map<Object, Object> variables) throws Exception {
// DETACHED_SDX / RESIZED_SDX / IS_DETACH_DURING_RECOVERY were stored by the
// earlier SDX_DETACH_CLUSTER_STATE action.
SdxCluster detachedCluster = (SdxCluster) variables.get(DETACHED_SDX);
sdxDetachService.markAsDetached(detachedCluster.getId());
eventSenderService.notifyEvent(detachedCluster, context, ResourceEvent.SDX_DETACH_FINISHED);
LOGGER.info("Detaching of SDX with ID {} finished.", detachedCluster.getId());
// NOTE(review): unchecked unboxing — throws NPE if IS_DETACH_DURING_RECOVERY is
// absent; presumably guaranteed by the preceding state, verify against the flow config.
if (!((boolean) variables.get(IS_DETACH_DURING_RECOVERY))) {
SdxCluster resizedCluster = (SdxCluster) variables.get(RESIZED_SDX);
LOGGER.info("Attaching of SDX cluster with ID {} in progress.", resizedCluster.getId());
MDCBuilder.buildMdcContext(resizedCluster);
// Persist the resized cluster (assigning ownership) before marking it attached.
resizedCluster = sdxAttachService.saveSdxAndAssignResourceOwnerRole(resizedCluster);
sdxAttachService.markAsAttached(resizedCluster);
LOGGER.info("Attaching of SDX cluster with ID {} is complete.", resizedCluster.getId());
// Point the flow context at the newly attached cluster for the remaining states.
context.setSdxId(resizedCluster.getId());
}
sendEvent(context, SDX_ATTACH_NEW_CLUSTER_SUCCESS_EVENT.event(), payload);
}
@Override
protected Object getFailurePayload(SdxEvent payload, Optional<SdxContext> flowContext, Exception e) {
LOGGER.error("Failed to attach new cluster during detach.", e);
return SdxDetachFailedEvent.from(payload, e);
}
};
}
Usage example of com.sequenceiq.datalake.flow.SdxContext in the Cloudbreak project by Hortonworks: class StartDatahubActions, method failedAction.
@Bean(name = "START_DATAHUB_FAILED_STATE")
public Action<?, ?> failedAction() {
    // Failure handler for the datahub-start flow: marks the flow failed, moves the
    // datalake into START_FAILED status with a human-readable reason, and completes the flow.
    return new AbstractSdxAction<>(StartDatahubFailedEvent.class) {
        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext,
                StartDatahubFailedEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void doExecute(SdxContext context, StartDatahubFailedEvent payload, Map<Object, Object> variables) throws Exception {
            Exception exception = payload.getException();
            DatalakeStatusEnum failedStatus = DatalakeStatusEnum.START_FAILED;
            LOGGER.info("Update SDX status to {} for resource: {}", failedStatus, payload.getResourceId(), exception);
            // Prefer the exception's own message; fall back to a generic reason when absent.
            String statusReason = exception.getMessage() != null ? exception.getMessage() : "Datahub start failed";
            Flow flow = getFlow(context.getFlowParameters().getFlowId());
            // Reuse the local instead of re-reading payload.getException() (same object, clearer intent).
            flow.setFlowFailed(exception);
            sdxStatusService.setStatusForDatalakeAndNotify(failedStatus, statusReason, payload.getResourceId());
            sendEvent(context, START_DATAHUB_HANDLED_EVENT.event(), payload);
        }

        @Override
        protected Object getFailurePayload(StartDatahubFailedEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            // Terminal failure state: nothing further to dispatch.
            return null;
        }
    };
}
Aggregations