Use of com.sequenceiq.datalake.flow.dr.backup.event.DatalakeTriggerBackupEvent in project cloudbreak by hortonworks.
The class SdxBackupRestoreService, method triggerDatalakeBackupFlow (overload taking a cluster id):
private SdxBackupResponse triggerDatalakeBackupFlow(Long clusterId, String backupLocation, String backupName) {
    String selector = DATALAKE_TRIGGER_BACKUP_EVENT.event();
    String userId = ThreadBasedUserCrnProvider.getUserCrn();
    DatalakeTriggerBackupEvent startEvent = new DatalakeTriggerBackupEvent(selector, clusterId, userId,
            backupLocation, backupName, DatalakeBackupFailureReason.USER_TRIGGERED);
    FlowIdentifier flowIdentifier = sdxReactorFlowManager.triggerDatalakeBackupFlow(startEvent);
    return new SdxBackupResponse(startEvent.getDrStatus().getOperationId(), flowIdentifier);
}
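For reference, the shape of the DatalakeTriggerBackupEvent payload can be inferred from the constructor calls and getters used on this page. The sketch below is an approximation only; the SdxEvent base class, its 3-arg super constructor, and the DatalakeDrStatus type name are assumptions rather than the project's actual declarations.

// Approximate sketch of the payload, reconstructed from its usages on this page; not the actual source.
public class DatalakeTriggerBackupEvent extends SdxEvent {  // base class assumed

    private final String backupLocation;
    private final String backupName;
    private final DatalakeBackupFailureReason reason;
    private final DatalakeDrStatus drStatus = new DatalakeDrStatus();  // type name assumed; holds the operation id

    public DatalakeTriggerBackupEvent(String selector, Long sdxId, String userId,
            String backupLocation, String backupName, DatalakeBackupFailureReason reason) {
        super(selector, sdxId, userId);  // assumed (selector, resourceId, userId) super constructor
        this.backupLocation = backupLocation;
        this.backupName = backupName;
        this.reason = reason;
    }

    // A second constructor additionally accepts the Promise returned by event.accepted()
    // (see the flow chain factories below).

    public String getBackupLocation() {
        return backupLocation;
    }

    public String getBackupName() {
        return backupName;
    }

    public DatalakeBackupFailureReason getReason() {
        return reason;
    }

    public DatalakeDrStatus getDrStatus() {
        return drStatus;
    }
}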
Use of com.sequenceiq.datalake.flow.dr.backup.event.DatalakeTriggerBackupEvent in project cloudbreak by hortonworks.
The class DatalakeResizeFlowEventChainFactory, method createFlowTriggerEventQueue:
@Override
public FlowTriggerEventQueue createFlowTriggerEventQueue(DatalakeResizeFlowChainStartEvent event) {
    Queue<Selectable> chain = new ConcurrentLinkedQueue<>();
    if (event.shouldTakeBackup()) {
        // Take a backup first
        chain.add(new DatalakeTriggerBackupEvent(DATALAKE_TRIGGER_BACKUP_EVENT.event(), event.getResourceId(), event.getUserId(),
                event.getBackupLocation(), "resize" + System.currentTimeMillis(), DatalakeBackupFailureReason.BACKUP_ON_RESIZE, event.accepted()));
        // Stop the data lake
        chain.add(new SdxStartStopEvent(SDX_STOP_EVENT.event(), event.getResourceId(), event.getUserId(), STOP_DATAHUBS));
    } else {
        chain.add(new SdxStartStopEvent(SDX_STOP_EVENT.event(), event.getResourceId(), event.getUserId(), STOP_DATAHUBS, event.accepted()));
    }
    // Detach the SDX cluster from its environment
    chain.add(new SdxStartDetachEvent(SDX_DETACH_EVENT.event(), event.getResourceId(), event.getSdxCluster(), event.getUserId()));
    // Create the new cluster
    chain.add(new SdxEvent(SDX_VALIDATION_EVENT.event(), event.getResourceId(), event.getSdxCluster().getClusterName(), event.getUserId()));
    if (event.shouldPerformRestore()) {
        // Restore the backup into the new cluster
        chain.add(new DatalakeTriggerRestoreEvent(DATALAKE_TRIGGER_RESTORE_EVENT.event(), event.getResourceId(), event.getSdxCluster().getClusterName(),
                event.getUserId(), null, event.getBackupLocation(), null, DatalakeRestoreFailureReason.RESTORE_ON_RESIZE));
    }
    // Delete the detached SDX cluster
    chain.add(new SdxDeleteStartEvent(SDX_DELETE_EVENT.event(), event.getResourceId(), event.getUserId(), true));
    chain.add(new DatahubRefreshStartEvent(event.getResourceId(), event.getSdxCluster().getClusterName(), event.getUserId()));
    return new FlowTriggerEventQueue(getName(), event, chain);
}
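The ordering above can be exercised directly. The following is a hedged Mockito/AssertJ test sketch, assuming the factory has a usable no-argument constructor, that FlowTriggerEventQueue exposes its queue via getQueue(), and that the stubbed getter values are representative; none of these details are confirmed by the snippet itself.

@Test
public void backupEventLeadsTheChainWhenBackupRequested() {
    // All stubbed values below are illustrative assumptions.
    DatalakeResizeFlowChainStartEvent event = mock(DatalakeResizeFlowChainStartEvent.class);
    SdxCluster sdxCluster = mock(SdxCluster.class);
    when(event.shouldTakeBackup()).thenReturn(true);
    when(event.getResourceId()).thenReturn(1L);
    when(event.getUserId()).thenReturn("crn:altus:iam:us-west-1:acc:user:example");
    when(event.getBackupLocation()).thenReturn("s3a://bucket/backups");
    when(event.getSdxCluster()).thenReturn(sdxCluster);
    when(sdxCluster.getClusterName()).thenReturn("test-dl");

    FlowTriggerEventQueue queue = new DatalakeResizeFlowEventChainFactory().createFlowTriggerEventQueue(event);

    // When shouldTakeBackup() is true, the backup trigger is the first element of the chain.
    assertThat(queue.getQueue().peek()).isInstanceOf(DatalakeTriggerBackupEvent.class);
}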
Use of com.sequenceiq.datalake.flow.dr.backup.event.DatalakeTriggerBackupEvent in project cloudbreak by hortonworks.
The class DatalakeUpgradeFlowEventChainFactory, method createFlowTriggerEventQueue:
@Override
public FlowTriggerEventQueue createFlowTriggerEventQueue(DatalakeUpgradeFlowChainStartEvent event) {
    Queue<Selectable> chain = new ConcurrentLinkedQueue<>();
    chain.add(new DatalakeTriggerBackupEvent(DATALAKE_TRIGGER_BACKUP_EVENT.event(), event.getResourceId(), event.getUserId(),
            event.getBackupLocation(), "", DatalakeBackupFailureReason.BACKUP_ON_UPGRADE, event.accepted()));
    chain.add(new DatalakeUpgradeStartEvent(DATALAKE_UPGRADE_EVENT.event(), event.getResourceId(), event.getUserId(),
            event.getImageId(), event.getReplaceVms()));
    return new FlowTriggerEventQueue(getName(), event, chain);
}
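The upgrade chain can be checked the same way as the resize chain above. This hedged sketch again assumes a no-argument factory constructor and a getQueue() accessor on FlowTriggerEventQueue; the stubbed values are placeholders.

@Test
public void backupIsQueuedBeforeUpgrade() {
    DatalakeUpgradeFlowChainStartEvent event = mock(DatalakeUpgradeFlowChainStartEvent.class);
    when(event.getResourceId()).thenReturn(1L);
    when(event.getUserId()).thenReturn("crn:altus:iam:us-west-1:acc:user:example");
    when(event.getBackupLocation()).thenReturn("s3a://bucket/backups");
    when(event.getImageId()).thenReturn("image-id");

    List<Selectable> chain = new ArrayList<>(
            new DatalakeUpgradeFlowEventChainFactory().createFlowTriggerEventQueue(event).getQueue());

    // The backup is always triggered first, then the upgrade itself.
    assertThat(chain.get(0)).isInstanceOf(DatalakeTriggerBackupEvent.class);
    assertThat(chain.get(1)).isInstanceOf(DatalakeUpgradeStartEvent.class);
}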
Use of com.sequenceiq.datalake.flow.dr.backup.event.DatalakeTriggerBackupEvent in project cloudbreak by hortonworks.
The class SdxBackupRestoreService, method triggerDatalakeBackupFlow (overload taking an SdxCluster):
private SdxBackupResponse triggerDatalakeBackupFlow(SdxCluster cluster, String backupLocation, String backupName) {
    String selector = DATALAKE_TRIGGER_BACKUP_EVENT.event();
    String userId = ThreadBasedUserCrnProvider.getUserCrn();
    DatalakeTriggerBackupEvent startEvent = new DatalakeTriggerBackupEvent(selector, cluster.getId(), userId,
            backupLocation, backupName, DatalakeBackupFailureReason.USER_TRIGGERED);
    FlowIdentifier flowIdentifier = sdxReactorFlowManager.triggerDatalakeBackupFlow(startEvent, cluster.getClusterName());
    return new SdxBackupResponse(startEvent.getDrStatus().getOperationId(), flowIdentifier);
}
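For context, a public entry point would typically resolve the SdxCluster first and then delegate to this private overload. The sketch below is hypothetical: the lookup method getByNameInAccount and its argument order are assumptions, not the confirmed Cloudbreak API surface.

// Hypothetical caller sketch; the lookup helper is an assumption for illustration only.
public SdxBackupResponse triggerDatalakeBackup(String clusterName, String backupLocation, String backupName) {
    String userCrn = ThreadBasedUserCrnProvider.getUserCrn();
    SdxCluster cluster = sdxService.getByNameInAccount(userCrn, clusterName);  // hypothetical lookup method
    return triggerDatalakeBackupFlow(cluster, backupLocation, backupName);
}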
Use of com.sequenceiq.datalake.flow.dr.backup.event.DatalakeTriggerBackupEvent in project cloudbreak by hortonworks.
The class DatalakeBackupActions, method triggerDatalakeBackup:
@Bean(name = "DATALAKE_TRIGGERING_BACKUP_STATE")
public Action<?, ?> triggerDatalakeBackup() {
    return new AbstractSdxAction<>(DatalakeTriggerBackupEvent.class) {

        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext,
                DatalakeTriggerBackupEvent payload) {
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void prepareExecution(DatalakeTriggerBackupEvent payload, Map<Object, Object> variables) {
            variables.put(OPERATION_ID, payload.getDrStatus().getOperationId());
            variables.put(REASON, payload.getReason().name());
            super.prepareExecution(payload, variables);
        }

        @Override
        protected void doExecute(SdxContext context, DatalakeTriggerBackupEvent payload, Map<Object, Object> variables) {
            LOGGER.info("Triggering data lake backup for {}", payload.getResourceId());
            SdxCluster sdxCluster = sdxService.getById(payload.getResourceId());
            eventSenderService.sendEventAndNotification(sdxCluster, context.getFlowTriggerUserCrn(), ResourceEvent.DATALAKE_BACKUP_IN_PROGRESS);
            DatalakeBackupStatusResponse backupStatusResponse = sdxBackupRestoreService.triggerDatalakeBackup(payload.getResourceId(),
                    payload.getBackupLocation(), payload.getBackupName(), payload.getUserId());
            variables.put(BACKUP_ID, backupStatusResponse.getBackupId());
            variables.put(OPERATION_ID, backupStatusResponse.getBackupId());
            payload.getDrStatus().setOperationId(backupStatusResponse.getBackupId());
            if (!backupStatusResponse.failed()) {
                sendEvent(context, DatalakeDatabaseBackupStartEvent.from(payload, backupStatusResponse.getBackupId()));
            } else {
                sendEvent(context, DATALAKE_BACKUP_FAILED_EVENT.event(), payload);
            }
        }

        @Override
        protected Object getFailurePayload(DatalakeTriggerBackupEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            return DatalakeBackupFailedEvent.from(payload, ex);
        }
    };
}
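Because doExecute() stores the backup id, operation id, and trigger reason in the flow variables map, a later action in the same flow can read them back. The sketch below only illustrates that pattern: the state, selector name, and log message are hypothetical, and only the BACKUP_ID and REASON keys come from the action above.

// Hypothetical follow-up action in the same flow; not part of the actual DatalakeBackupActions class.
@Override
protected void doExecute(SdxContext context, SdxEvent payload, Map<Object, Object> variables) {
    String backupId = (String) variables.get(BACKUP_ID);
    String reason = (String) variables.get(REASON);
    LOGGER.info("Data lake backup {} completed (triggered because: {})", backupId, reason);
    sendEvent(context, DATALAKE_BACKUP_FINISHED_EVENT.event(), payload);  // selector name is hypothetical
}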