Usage of org.apache.hadoop.hive.ql.exec.repl.ReplDumpWork in project hive by apache.
Source: class ReplicationSemanticAnalyzer, method analyzeReplDump.
// REPL DUMP
private void analyzeReplDump(ASTNode ast) throws SemanticException {
  try {
    initReplDump(ast);
  } catch (HiveException e) {
    throw new SemanticException(e.getMessage(), e);
  }
  try {
    ctx.setResFile(ctx.getLocalTmpPath());
    // Build the dump work and register its task as a root of the plan.
    ReplDumpWork dumpWork = new ReplDumpWork(replScope,
        ASTErrorUtils.getMsg(ErrorMsg.INVALID_PATH.getMsg(), ast),
        ctx.getResFile().toUri().toString());
    rootTasks.add(TaskFactory.get(dumpWork, conf));
    // Record read entities: the whole database when every table is in scope,
    // otherwise only the tables matched by the replication scope.
    for (String matchedDb : Utils.matchesDb(db, replScope.getDbName())) {
      if (replScope.includeAllTables()) {
        inputs.add(new ReadEntity(db.getDatabase(matchedDb)));
      } else {
        for (String matchedTable : Utils.matchesTbl(db, matchedDb, replScope)) {
          inputs.add(new ReadEntity(db.getTable(matchedDb, matchedTable)));
        }
      }
    }
    setFetchTask(createFetchTask(dumpSchema));
  } catch (Exception e) {
    // TODO : simple wrap & rethrow for now, clean up with error codes
    LOG.warn("Error during analyzeReplDump", e);
    throw new SemanticException(e);
  }
}
Usage of org.apache.hadoop.hive.ql.exec.repl.ReplDumpWork in project hive by apache.
Source: class TestReplicationMetricUpdateOnFailure, method testReplDumpFailure.
@Test
public void testReplDumpFailure() throws Exception {
  String dumpDir = TEST_PATH + Path.SEPARATOR + testName.getMethodName();
  IncrementalDumpMetricCollector metricCollector =
      new IncrementalDumpMetricCollector(null, TEST_PATH, conf);
  ReplDumpWork replDumpWork = Mockito.mock(ReplDumpWork.class);
  Mockito.when(replDumpWork.getCurrentDumpPath()).thenReturn(new Path(dumpDir));
  Mockito.when(replDumpWork.getMetricCollector()).thenReturn(metricCollector);
  // First execution throws the recoverable error, the second the non-recoverable one.
  Mockito.when(replDumpWork.dataCopyIteratorsInitialized())
      .thenThrow(recoverableException, nonRecoverableException);
  // Parameterized task type for consistency with the sibling tests (avoids a raw-type warning).
  Task<ReplDumpWork> replDumpTask = TaskFactory.get(replDumpWork, conf);
  String stageName = "REPL_DUMP";
  metricCollector.reportStageStart(stageName, new HashMap<>());
  Assert.assertThrows(RuntimeException.class, replDumpTask::execute);
  performRecoverableChecks(stageName);
  // Restart the stage and fail again, this time non-recoverably.
  metricCollector.reportStageStart(stageName, new HashMap<>());
  Assert.assertThrows(RuntimeException.class, replDumpTask::execute);
  performNonRecoverableChecks(dumpDir, stageName);
}
Usage of org.apache.hadoop.hive.ql.exec.repl.ReplDumpWork in project hive by apache.
Source: class TestReplicationMetricUpdateOnFailure, method testReplDumpRecoverableMissingStage.
@Test
public void testReplDumpRecoverableMissingStage() throws Exception {
  String dumpDir = TEST_PATH + Path.SEPARATOR + testName.getMethodName();
  // Reset the collector so no stage metrics exist before the task runs.
  MetricCollector.getInstance().deinit();
  BootstrapDumpMetricCollector metricCollector =
      new BootstrapDumpMetricCollector(null, TEST_PATH, conf);
  ReplDumpWork work = Mockito.mock(ReplDumpWork.class);
  Mockito.when(work.getCurrentDumpPath()).thenReturn(new Path(dumpDir));
  Mockito.when(work.getMetricCollector()).thenReturn(metricCollector);
  Mockito.when(work.dataCopyIteratorsInitialized()).thenThrow(recoverableException);
  Task<ReplDumpWork> task = TaskFactory.get(work, conf);
  // ensure stages are missing initially and execute without reporting start metrics
  Assert.assertEquals(0, MetricCollector.getInstance().getMetrics().size());
  Assert.assertThrows(RuntimeException.class, task::execute);
  performRecoverableChecks("REPL_DUMP");
}
Usage of org.apache.hadoop.hive.ql.exec.repl.ReplDumpWork in project hive by apache.
Source: class TestReplicationMetricUpdateOnFailure, method testReplDumpNonRecoverableMissingStage.
@Test
public void testReplDumpNonRecoverableMissingStage() throws Exception {
  String dumpDir = TEST_PATH + Path.SEPARATOR + testName.getMethodName();
  // Reset the collector so no stage metrics exist before the task runs.
  MetricCollector.getInstance().deinit();
  IncrementalDumpMetricCollector metricCollector =
      new IncrementalDumpMetricCollector(null, TEST_PATH, conf);
  ReplDumpWork replDumpWork = Mockito.mock(ReplDumpWork.class);
  Mockito.when(replDumpWork.getCurrentDumpPath()).thenReturn(new Path(dumpDir));
  Mockito.when(replDumpWork.getMetricCollector()).thenReturn(metricCollector);
  Mockito.when(replDumpWork.dataCopyIteratorsInitialized()).thenThrow(nonRecoverableException);
  // Parameterized task type for consistency with the sibling tests (avoids a raw-type warning).
  Task<ReplDumpWork> replDumpTask = TaskFactory.get(replDumpWork, conf);
  // ensure stages are missing initially and execute without reporting start metrics
  Assert.assertEquals(0, MetricCollector.getInstance().getMetrics().size());
  Assert.assertThrows(RuntimeException.class, replDumpTask::execute);
  performNonRecoverableChecks(dumpDir, "REPL_DUMP");
}
Aggregations