use of com.sequenceiq.datalake.flow.dr.restore.event.DatalakeTriggerRestoreEvent in project cloudbreak by hortonworks.
the class DatalakeRestoreActions method triggerDatalakeRestore.
@Bean(name = "DATALAKE_TRIGGERING_RESTORE_STATE")
public Action<?, ?> triggerDatalakeRestore() {
    return new AbstractSdxAction<>(DatalakeTriggerRestoreEvent.class) {

        @Override
        protected SdxContext createFlowContext(FlowParameters flowParameters, StateContext<FlowState, FlowEvent> stateContext,
                DatalakeTriggerRestoreEvent payload) {
            // When SDX is created as part of re-size flow chain, SDX in payload will not have the correct ID.
            Optional<FlowLog> lastFlowLog = flowLogService.getLastFlowLog(flowParameters.getFlowId());
            if (lastFlowLog.isPresent()) {
                SdxContext sdxContext;
                Optional<FlowChainLog> flowChainLog = flowChainLogService.findFirstByFlowChainIdOrderByCreatedDesc(lastFlowLog.get().getFlowChainId());
                if (flowChainLog.isPresent() && flowChainLog.get().getFlowChainType().equals(DatalakeResizeFlowEventChainFactory.class.getSimpleName())) {
                    SdxCluster sdxCluster = sdxService.getByNameInAccount(payload.getUserId(), payload.getSdxName());
                    LOGGER.info("Updating the Sdx-id in context from {} to {}", payload.getResourceId(), sdxCluster.getId());
                    payload.getDrStatus().setSdxClusterId(sdxCluster.getId());
                    sdxContext = SdxContext.from(flowParameters, payload);
                    sdxContext.setSdxId(sdxCluster.getId());
                    return sdxContext;
                }
            }
            return SdxContext.from(flowParameters, payload);
        }

        @Override
        protected void prepareExecution(DatalakeTriggerRestoreEvent payload, Map<Object, Object> variables) {
            super.prepareExecution(payload, variables);
        }

        @Override
        protected void doExecute(SdxContext context, DatalakeTriggerRestoreEvent payload, Map<Object, Object> variables) {
            DatalakeRestoreStatusResponse restoreStatusResponse = sdxBackupRestoreService.triggerDatalakeRestore(context.getSdxId(),
                    payload.getBackupId(), payload.getBackupLocationOverride(), payload.getUserId());
            variables.put(RESTORE_ID, restoreStatusResponse.getRestoreId());
            variables.put(BACKUP_ID, restoreStatusResponse.getBackupId());
            variables.put(OPERATION_ID, restoreStatusResponse.getRestoreId());
            payload.getDrStatus().setOperationId(restoreStatusResponse.getRestoreId());
            if (!restoreStatusResponse.failed()) {
                sendEvent(context, DatalakeDatabaseRestoreStartEvent.from(payload, context.getSdxId(),
                        restoreStatusResponse.getBackupId(), restoreStatusResponse.getRestoreId()));
            } else {
                LOGGER.error("Datalake restore has failed for {} ", context.getSdxId());
                sendEvent(context, DATALAKE_RESTORE_FAILED_EVENT.event(), payload);
            }
        }

        @Override
        protected Object getFailurePayload(DatalakeTriggerRestoreEvent payload, Optional<SdxContext> flowContext, Exception ex) {
            return DatalakeRestoreFailedEvent.from(flowContext, payload, ex);
        }
    };
}
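The id correction in createFlowContext only fires when the current flow was started by the resize flow chain: in that case the cluster named in the payload is looked up again so the context carries the newly created cluster's id rather than the stale one from the payload. A minimal, self-contained sketch of that pattern follows; the ClusterIdResolver name, the resolveClusterId helper and the lookupIdByName function are hypothetical stand-ins for the real Cloudbreak services, not project API.

import java.util.Optional;
import java.util.function.Function;

class ClusterIdResolver {
    // Hypothetical helper mirroring the branch above: the payload id is trusted
    // unless the surrounding flow chain is the resize chain, in which case the
    // cluster is re-resolved by name and that id wins.
    static Long resolveClusterId(Long payloadId, String clusterName,
            Optional<String> activeChainType, Function<String, Long> lookupIdByName) {
        if (activeChainType.filter("DatalakeResizeFlowEventChainFactory"::equals).isPresent()) {
            return lookupIdByName.apply(clusterName);
        }
        return payloadId;
    }
}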
use of com.sequenceiq.datalake.flow.dr.restore.event.DatalakeTriggerRestoreEvent in project cloudbreak by hortonworks.
the class DatalakeRestoreActionsTest method testGetNewSdxIdForResizeDoExecute.
@Test
public void testGetNewSdxIdForResizeDoExecute() throws Exception {
    when(sdxBackupRestoreService.triggerDatalakeRestore(eq(NEW_SDX_ID), any(), any(), eq(USER_CRN)))
            .thenReturn(new DatalakeRestoreStatusResponse(BACKUP_ID, RESTORE_ID, State.STARTED, Optional.empty()));
    DatalakeTriggerRestoreEvent event = new DatalakeTriggerRestoreEvent(DATALAKE_TRIGGER_RESTORE_EVENT.event(), NEW_SDX_ID, DATALAKE_NAME,
            USER_CRN, null, BACKUP_LOCATION, null, DatalakeRestoreFailureReason.RESTORE_ON_RESIZE);
    AbstractAction action = (AbstractAction) underTest.triggerDatalakeRestore();
    initActionPrivateFields(action);
    AbstractActionTestSupport testSupport = new AbstractActionTestSupport(action);
    SdxContext context = SdxContext.from(new FlowParameters(FLOW_ID, FLOW_ID, null), event);
    testSupport.doExecute(context, event, new HashMap());
    ArgumentCaptor<DatalakeDatabaseRestoreStartEvent> captor = ArgumentCaptor.forClass(DatalakeDatabaseRestoreStartEvent.class);
    verify(reactorEventFactory, times(1)).createEvent(any(), captor.capture());
    DatalakeDatabaseRestoreStartEvent captorValue = captor.getValue();
    Assertions.assertEquals(NEW_SDX_ID, captorValue.getResourceId());
}
use of com.sequenceiq.datalake.flow.dr.restore.event.DatalakeTriggerRestoreEvent in project cloudbreak by hortonworks.
the class DatalakeResizeFlowEventChainFactory method createFlowTriggerEventQueue.
@Override
public FlowTriggerEventQueue createFlowTriggerEventQueue(DatalakeResizeFlowChainStartEvent event) {
    Queue<Selectable> chain = new ConcurrentLinkedQueue<>();
    if (event.shouldTakeBackup()) {
        // Take a backup
        chain.add(new DatalakeTriggerBackupEvent(DATALAKE_TRIGGER_BACKUP_EVENT.event(), event.getResourceId(), event.getUserId(),
                event.getBackupLocation(), "resize" + System.currentTimeMillis(), DatalakeBackupFailureReason.BACKUP_ON_RESIZE, event.accepted()));
        // Stop datalake
        chain.add(new SdxStartStopEvent(SDX_STOP_EVENT.event(), event.getResourceId(), event.getUserId(), STOP_DATAHUBS));
    } else {
        chain.add(new SdxStartStopEvent(SDX_STOP_EVENT.event(), event.getResourceId(), event.getUserId(), STOP_DATAHUBS, event.accepted()));
    }
    // Detach sdx from environment
    chain.add(new SdxStartDetachEvent(SDX_DETACH_EVENT.event(), event.getResourceId(), event.getSdxCluster(), event.getUserId()));
    // Create new
    chain.add(new SdxEvent(SDX_VALIDATION_EVENT.event(), event.getResourceId(), event.getSdxCluster().getClusterName(), event.getUserId()));
    if (event.shouldPerformRestore()) {
        // Restore the new cluster
        chain.add(new DatalakeTriggerRestoreEvent(DATALAKE_TRIGGER_RESTORE_EVENT.event(), event.getResourceId(), event.getSdxCluster().getClusterName(),
                event.getUserId(), null, event.getBackupLocation(), null, DatalakeRestoreFailureReason.RESTORE_ON_RESIZE));
    }
    // Delete the detached Sdx
    chain.add(new SdxDeleteStartEvent(SDX_DELETE_EVENT.event(), event.getResourceId(), event.getUserId(), true));
    chain.add(new DatahubRefreshStartEvent(event.getResourceId(), event.getSdxCluster().getClusterName(), event.getUserId()));
    return new FlowTriggerEventQueue(getName(), event, chain);
}
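The factory assembles the resize chain as an ordered queue of trigger events: optional backup, stop, detach, validation/creation of the new cluster, optional restore, deletion of the detached cluster, then a datahub refresh. The sketch below only illustrates that the queue is drained strictly in insertion order (so the restore can never run before the new cluster's creation step); the DrainLoop class, the plain Queue<String> stand-in and the selector strings are illustrative, not Cloudbreak API.

import java.util.ArrayDeque;
import java.util.Queue;

class DrainLoop {
    public static void main(String[] args) {
        // Stand-in for the chain built above; selectors are consumed in the order they were added.
        Queue<String> chain = new ArrayDeque<>();
        chain.add("DATALAKE_TRIGGER_BACKUP_EVENT");
        chain.add("SDX_STOP_EVENT");
        chain.add("SDX_DETACH_EVENT");
        chain.add("SDX_VALIDATION_EVENT");
        chain.add("DATALAKE_TRIGGER_RESTORE_EVENT");
        chain.add("SDX_DELETE_EVENT");
        chain.add("DATAHUB_REFRESH_START_EVENT");
        while (!chain.isEmpty()) {
            System.out.println("next flow: " + chain.poll());
        }
    }
}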
use of com.sequenceiq.datalake.flow.dr.restore.event.DatalakeTriggerRestoreEvent in project cloudbreak by hortonworks.
the class SdxBackupRestoreService method triggerDatalakeRestoreFlow.
private SdxRestoreResponse triggerDatalakeRestoreFlow(SdxCluster cluster, String backupId, String backupLocation, String backupLocationOverride) {
    String selector = DATALAKE_TRIGGER_RESTORE_EVENT.event();
    String userId = ThreadBasedUserCrnProvider.getUserCrn();
    DatalakeTriggerRestoreEvent startEvent = new DatalakeTriggerRestoreEvent(selector, cluster.getId(), null, userId,
            backupId, backupLocation, backupLocationOverride, DatalakeRestoreFailureReason.USER_TRIGGERED);
    FlowIdentifier flowIdentifier = sdxReactorFlowManager.triggerDatalakeRestoreFlow(startEvent, cluster.getClusterName());
    return new SdxRestoreResponse(startEvent.getDrStatus().getOperationId(), flowIdentifier);
}
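This trigger method packages the user-initiated restore as a DatalakeTriggerRestoreEvent, hands it to the reactor flow manager, and returns both the operation id (taken from the event's DrStatus) and the flow identifier, so callers can track either handle. A minimal sketch of that request/response shape follows; the RestoreRequestHandler and OperationHandle names, and the way the ids are generated, are assumptions for illustration rather than the project's actual types.

import java.util.UUID;

class RestoreRequestHandler {
    // Hypothetical response pair: one id for the DR operation itself, one for the flow that runs it.
    record OperationHandle(String operationId, String flowId) { }

    OperationHandle triggerRestore(String clusterName, String backupId, String backupLocation) {
        String operationId = UUID.randomUUID().toString();   // analogous to the DrStatus operation id
        String flowId = startFlow(clusterName, backupId, backupLocation, operationId);
        return new OperationHandle(operationId, flowId);
    }

    private String startFlow(String clusterName, String backupId, String backupLocation, String operationId) {
        // Stand-in for sdxReactorFlowManager.triggerDatalakeRestoreFlow(...)
        return UUID.randomUUID().toString();
    }
}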
use of com.sequenceiq.datalake.flow.dr.restore.event.DatalakeTriggerRestoreEvent in project cloudbreak by hortonworks.
the class DatalakeRestoreActionsTest method testGetNewSdxIdForResizeCreateFlowContext.
@Test
public void testGetNewSdxIdForResizeCreateFlowContext() throws Exception {
    SdxCluster sdxCluster = genCluster();
    FlowParameters flowParameters = new FlowParameters(FLOW_ID, null, null);
    when(sdxService.getByNameInAccount(eq(USER_CRN), eq(DATALAKE_NAME))).thenReturn(sdxCluster);
    when(flowLogService.getLastFlowLog(anyString())).thenReturn(Optional.of(new FlowLog()));
    FlowChainLog flowChainLog = new FlowChainLog();
    flowChainLog.setFlowChainType(DatalakeResizeFlowEventChainFactory.class.getSimpleName());
    when(flowChainLogService.findFirstByFlowChainIdOrderByCreatedDesc(any())).thenReturn(Optional.of(flowChainLog));
    DatalakeTriggerRestoreEvent event = new DatalakeTriggerRestoreEvent(DATALAKE_TRIGGER_RESTORE_EVENT.event(), OLD_SDX_ID, DATALAKE_NAME,
            USER_CRN, null, BACKUP_LOCATION, null, DatalakeRestoreFailureReason.RESTORE_ON_RESIZE);
    AbstractAction action = (AbstractAction) underTest.triggerDatalakeRestore();
    initActionPrivateFields(action);
    AbstractActionTestSupport testSupport = new AbstractActionTestSupport(action);
    SdxContext context = (SdxContext) testSupport.createFlowContext(flowParameters, null, event);
    Assert.assertEquals(NEW_SDX_ID, context.getSdxId());
    Assert.assertEquals(USER_CRN, context.getUserId());
    Assert.assertEquals(FLOW_ID, context.getFlowId());
    Assert.assertEquals(NEW_SDX_ID, event.getDrStatus().getSdxClusterId());

    flowChainLog.setFlowChainType(DatalakeUpgradeFlowEventChainFactory.class.getSimpleName());
    context = (SdxContext) testSupport.createFlowContext(flowParameters, null, event);
    Assert.assertEquals(OLD_SDX_ID, context.getSdxId());
}