Usage example of com.sequenceiq.datalake.service.AbstractSdxAction in the Cloudbreak project by Hortonworks: the startDatahubRefreshAction method of the DatahubRefreshActions class.
@Bean(name = "DATAHUB_REFRESH_START_STATE")
public Action<?, ?> startDatahubRefreshAction() {
    return new AbstractSdxAction<>(DatahubRefreshStartEvent.class) {

        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, DatahubRefreshStartEvent payload) {
            // This state is only entered as part of a resize operation, so the context must
            // be re-pointed at the newly created cluster rather than the one in the payload.
            SdxCluster resizedCluster = sdxService.getByNameInAccount(payload.getUserId(), payload.getSdxName());
            LOGGER.info("Updating the Sdx-id in context from {} to {}", payload.getResourceId(), resizedCluster.getId());
            SdxContext context = SdxContext.from(flowParameters, payload);
            context.setSdxId(resizedCluster.getId());
            return context;
        }

        @Override
        protected void doExecute(SdxContext context, DatahubRefreshStartEvent payload, Map<Object, Object> variables) throws Exception {
            // Rebuild the event so its resource id matches the (possibly updated) sdx id in the context.
            DatahubRefreshStartEvent refreshedPayload = new DatahubRefreshStartEvent(context.getSdxId(), payload.getSdxName(), payload.getUserId());
            LOGGER.info("Start datahub refresh associated with Sdx: {}", refreshedPayload.getSdxName());
            SdxCluster sdxCluster = sdxService.getById(context.getSdxId());
            variables.put(SDX, sdxCluster);
            eventSenderService.sendEventAndNotification(sdxCluster, context.getFlowTriggerUserCrn(), ResourceEvent.ENVIRONMENT_RESTART_DATAHUB_STARTED);
            sdxStatusService.setStatusForDatalakeAndNotify(DatalakeStatusEnum.RUNNING, "Datahub refresh in progress", refreshedPayload.getResourceId());
            sdxRefreshService.refreshAllDatahub(refreshedPayload.getResourceId());
            sendEvent(context, DatahubRefreshFlowEvent.DATAHUB_REFRESH_IN_PROGRESS_EVENT.selector(), refreshedPayload);
        }

        @Override
        protected Object getFailurePayload(DatahubRefreshStartEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            // Propagate the failure to the refresh-failed handler.
            return new DatahubRefreshFailedEvent(payload.getResourceId(), payload.getUserId(), ex);
        }
    };
}
Usage example of com.sequenceiq.datalake.service.AbstractSdxAction in the Cloudbreak project by Hortonworks: the restoreCouldNotStart method of the DatalakeRestoreActions class.
@Bean(name = "DATALAKE_DATABASE_RESTORE_COULD_NOT_START_STATE")
public Action<?, ?> restoreCouldNotStart() {
    return new AbstractSdxAction<>(DatalakeDatabaseRestoreCouldNotStartEvent.class) {

        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, DatalakeDatabaseRestoreCouldNotStartEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void doExecute(SdxContext context, DatalakeDatabaseRestoreCouldNotStartEvent payload, Map<Object, Object> variables) {
            // Record the startup failure against the tracked operation, then hand off to the failure-handled transition.
            Exception cause = payload.getException();
            LOGGER.error("Datalake database restore could not be started for datalake with id: {}", payload.getResourceId(), cause);
            String operationId = (String) variables.get(OPERATION_ID);
            sdxBackupRestoreService.updateDatabaseStatusEntry(operationId, SdxOperationStatus.FAILED, cause.getLocalizedMessage());
            sendEvent(context, DATALAKE_DATABASE_RESTORE_FAILURE_HANDLED_EVENT.event(), payload);
        }

        @Override
        protected Object getFailurePayload(DatalakeDatabaseRestoreCouldNotStartEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            return DatalakeDatabaseRestoreFailedEvent.from(payload, ex);
        }
    };
}
Usage example of com.sequenceiq.datalake.service.AbstractSdxAction in the Cloudbreak project by Hortonworks: the databaseRestoreFailed method of the DatalakeRestoreActions class.
@Bean(name = "DATALAKE_DATABASE_RESTORE_FAILED_STATE")
public Action<?, ?> databaseRestoreFailed() {
    return new AbstractSdxAction<>(DatalakeDatabaseRestoreFailedEvent.class) {

        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, DatalakeDatabaseRestoreFailedEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void doExecute(SdxContext context, DatalakeDatabaseRestoreFailedEvent payload, Map<Object, Object> variables) {
            Exception exception = payload.getException();
            // Fixed copy-pasted log message: this state means the restore FAILED, not that it
            // could not be started (that message belongs to DATALAKE_DATABASE_RESTORE_COULD_NOT_START_STATE).
            LOGGER.error("Datalake database restore failed for datalake with id: {}", payload.getResourceId(), exception);
            // Persist the failure on the tracked restore operation so status queries reflect it.
            String operationId = (String) variables.get(OPERATION_ID);
            sdxBackupRestoreService.updateDatabaseStatusEntry(operationId, SdxOperationStatus.FAILED, exception.getLocalizedMessage());
            // Mark the flow itself as failed before emitting the failure-handled event.
            Flow flow = getFlow(context.getFlowParameters().getFlowId());
            flow.setFlowFailed(exception);
            sendEvent(context, DATALAKE_DATABASE_RESTORE_FAILURE_HANDLED_EVENT.event(), payload);
        }

        @Override
        protected Object getFailurePayload(DatalakeDatabaseRestoreFailedEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            return DatalakeDatabaseRestoreFailedEvent.from(payload, ex);
        }
    };
}
Usage example of com.sequenceiq.datalake.service.AbstractSdxAction in the Cloudbreak project by Hortonworks: the finishedRestoreAction method of the DatalakeRestoreActions class.
@Bean(name = "DATALAKE_RESTORE_FINISHED_STATE")
public Action<?, ?> finishedRestoreAction() {
    return new AbstractSdxAction<>(DatalakeRestoreSuccessEvent.class) {

        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, DatalakeRestoreSuccessEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void doExecute(SdxContext context, DatalakeRestoreSuccessEvent payload, Map<Object, Object> variables) {
            // Successful restore: flip the datalake back to RUNNING, notify, record the metric, finalize the flow.
            LOGGER.info("Sdx database restore is finalized with sdx id: {}", payload.getResourceId());
            SdxCluster cluster = sdxStatusService.setStatusForDatalakeAndNotify(DatalakeStatusEnum.RUNNING, ResourceEvent.DATALAKE_RESTORE_FINISHED, "Datalake restore finished, Datalake is running", payload.getResourceId());
            eventSenderService.sendEventAndNotification(cluster, context.getFlowTriggerUserCrn(), ResourceEvent.DATALAKE_RESTORE_FINISHED);
            metricService.incrementMetricCounter(MetricType.SDX_RESTORE_FINISHED, cluster);
            sendEvent(context, DATALAKE_DATABASE_RESTORE_FINALIZED_EVENT.event(), payload);
        }

        @Override
        protected Object getFailurePayload(DatalakeRestoreSuccessEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            return DatalakeDatabaseRestoreFailedEvent.from(payload, ex);
        }
    };
}
Usage example of com.sequenceiq.datalake.service.AbstractSdxAction in the Cloudbreak project by Hortonworks: the triggerDatalakeRestore method of the DatalakeRestoreActions class.
@Bean(name = "DATALAKE_TRIGGERING_RESTORE_STATE")
public Action<?, ?> triggerDatalakeRestore() {
    return new AbstractSdxAction<>(DatalakeTriggerRestoreEvent.class) {

        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext, DatalakeTriggerRestoreEvent payload) {
            // When SDX is created as part of a re-size flow chain, the SDX id in the payload is stale,
            // so look the cluster up by name and re-point both the DR status and the context at it.
            Optional<FlowLog> lastFlowLog = flowLogService.getLastFlowLog(flowParameters.getFlowId());
            if (lastFlowLog.isPresent()) {
                Optional<FlowChainLog> flowChainLog = flowChainLogService.findFirstByFlowChainIdOrderByCreatedDesc(lastFlowLog.get().getFlowChainId());
                if (flowChainLog.isPresent() && flowChainLog.get().getFlowChainType().equals(DatalakeResizeFlowEventChainFactory.class.getSimpleName())) {
                    SdxCluster sdxCluster = sdxService.getByNameInAccount(payload.getUserId(), payload.getSdxName());
                    LOGGER.info("Updating the Sdx-id in context from {} to {}", payload.getResourceId(), sdxCluster.getId());
                    payload.getDrStatus().setSdxClusterId(sdxCluster.getId());
                    SdxContext sdxContext = SdxContext.from(flowParameters, payload);
                    sdxContext.setSdxId(sdxCluster.getId());
                    return sdxContext;
                }
            }
            return SdxContext.from(flowParameters, payload);
        }

        // NOTE: removed a redundant prepareExecution override that only delegated to super —
        // it added no behavior and obscured the fact that the default preparation is used.

        @Override
        protected void doExecute(SdxContext context, DatalakeTriggerRestoreEvent payload, Map<Object, Object> variables) {
            // Kick off the restore and stash the operation identifiers for later states in the flow.
            DatalakeRestoreStatusResponse restoreStatusResponse = sdxBackupRestoreService.triggerDatalakeRestore(context.getSdxId(), payload.getBackupId(), payload.getBackupLocationOverride(), payload.getUserId());
            variables.put(RESTORE_ID, restoreStatusResponse.getRestoreId());
            variables.put(BACKUP_ID, restoreStatusResponse.getBackupId());
            variables.put(OPERATION_ID, restoreStatusResponse.getRestoreId());
            payload.getDrStatus().setOperationId(restoreStatusResponse.getRestoreId());
            if (!restoreStatusResponse.failed()) {
                sendEvent(context, DatalakeDatabaseRestoreStartEvent.from(payload, context.getSdxId(), restoreStatusResponse.getBackupId(), restoreStatusResponse.getRestoreId()));
            } else {
                LOGGER.error("Datalake restore has failed for {} ", context.getSdxId());
                sendEvent(context, DATALAKE_RESTORE_FAILED_EVENT.event(), payload);
            }
        }

        @Override
        protected Object getFailurePayload(DatalakeTriggerRestoreEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            return DatalakeRestoreFailedEvent.from(flowContext, payload, ex);
        }
    };
}
Aggregations