Use of org.apache.hadoop.hive.ql.Context in project hive by apache.
In class TestReplicationMetricUpdateOnFailure, method testRecoverableDDLFailureWithStageMissing:
@Test
public void testRecoverableDDLFailureWithStageMissing() throws Exception {
  // task-setup for DDL-Task
  DDLWork ddlWork = Mockito.mock(DDLWork.class);
  Context context = Mockito.mock(Context.class);
  Mockito.when(context.getExplainAnalyze()).thenReturn(ExplainConfiguration.AnalyzeState.ANALYZING);
  Mockito.when(ddlWork.isReplication()).thenReturn(true);
  String dumpDir = TEST_PATH + Path.SEPARATOR + testName.getMethodName();
  Mockito.when(ddlWork.getDumpDirectory()).thenReturn(dumpDir);
  Task<DDLWork> ddlTask = TaskFactory.get(ddlWork, conf);
  ddlTask.initialize(null, null, null, context);
  MetricCollector.getInstance().deinit();
  IncrementalLoadMetricCollector metricCollector = new IncrementalLoadMetricCollector(null, TEST_PATH, 1, conf);
  // ensure stages are missing initially and execute without reporting start metrics
  Assert.assertEquals(0, MetricCollector.getInstance().getMetrics().size());
  Map<String, Long> metricMap = new HashMap<>();
  Mockito.when(ddlWork.getMetricCollector()).thenReturn(metricCollector);
  Mockito.when(ddlWork.getDDLDesc()).thenThrow(recoverableException);
  // test recoverable error during DDL-Task
  ddlTask.execute();
  performRecoverableChecks("REPL_LOAD");
}
Use of org.apache.hadoop.hive.ql.Context in project hive by apache.
In class TestReplicationMetricUpdateOnFailure, method testNonRecoverableDDLFailureWithStageMissing:
@Test
public void testNonRecoverableDDLFailureWithStageMissing() throws Exception {
  // task-setup for DDL-Task
  DDLWork ddlWork = Mockito.mock(DDLWork.class);
  Context context = Mockito.mock(Context.class);
  Mockito.when(context.getExplainAnalyze()).thenReturn(ExplainConfiguration.AnalyzeState.ANALYZING);
  Mockito.when(ddlWork.isReplication()).thenReturn(true);
  String dumpDir = TEST_PATH + Path.SEPARATOR + testName.getMethodName();
  Mockito.when(ddlWork.getDumpDirectory()).thenReturn(dumpDir);
  Task<DDLWork> ddlTask = TaskFactory.get(ddlWork, conf);
  ddlTask.initialize(null, null, null, context);
  MetricCollector.getInstance().deinit();
  IncrementalLoadMetricCollector metricCollector = new IncrementalLoadMetricCollector(null, TEST_PATH, 1, conf);
  // ensure stages are missing initially and execute without reporting start metrics
  Assert.assertEquals(0, MetricCollector.getInstance().getMetrics().size());
  Map<String, Long> metricMap = new HashMap<>();
  Mockito.when(ddlWork.getMetricCollector()).thenReturn(metricCollector);
  Mockito.when(ddlWork.getDDLDesc()).thenThrow(nonRecoverableException);
  // test non-recoverable error during DDL-Task, without initializing stage
  ddlTask.execute();
  performNonRecoverableChecks(dumpDir, "REPL_LOAD");
}
Use of org.apache.hadoop.hive.ql.Context in project hive by apache.
In class TestLineageInfo, method before:
@Before
public void before() {
  HiveConf conf = new HiveConf();
  SessionState.start(conf);
  ctx = new Context(conf);
}
Use of org.apache.hadoop.hive.ql.Context in project hive by apache.
In class UnlockTableOperation, method execute:
@Override
public int execute() throws HiveException {
  Context ctx = context.getContext();
  HiveTxnManager txnManager = ctx.getHiveTxnManager();
  return txnManager.unlockTable(context.getDb(), desc);
}
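Here the Context is not built by the operation itself; it is taken from the DDLOperationContext and serves as the handle through which the operation reaches the session's HiveTxnManager. For comparison, a minimal sketch of how the locking counterpart could follow the same shape (an illustrative sketch, not the project's actual lock operation source; it assumes a LockTableDesc field named desc and a matching lockTable method on HiveTxnManager):

@Override
public int execute() throws HiveException {
  // Same pattern: reach the session's transaction manager through the query Context.
  Context ctx = context.getContext();
  HiveTxnManager txnManager = ctx.getHiveTxnManager();
  // Assumption: lockTable mirrors unlockTable on HiveTxnManager.
  return txnManager.lockTable(context.getDb(), desc);
}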
Use of org.apache.hadoop.hive.ql.Context in project hive by apache.
In class TestNullScanTaskDispatcher, method setup:
@Before
public void setup() {
  hiveConf = new HiveConf();
  hiveConf.set("fs.mock.impl", MockFileSystem.class.getName());
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVEMETADATAONLYQUERIES, true);
  sessionState = SessionState.start(hiveConf);
  parseContext = spy(new ParseContext());
  context = new Context(hiveConf);
  parseContext.setTopOps(aliasToWork);
  mapWork.setAliasToWork(aliasToWork);
  createReduceWork();
}
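Taken together, the test setups above share one construction pattern: start a SessionState from a HiveConf, then build the query Context from that same configuration. A minimal, self-contained sketch of that pattern, assuming a JUnit 4 harness (the class name ContextSetupSketch and field names are illustrative, not from the project):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.junit.Before;

public class ContextSetupSketch {

  private Context ctx;

  @Before
  public void setUp() throws Exception {
    // A session must be started before query-scoped objects such as Context are created.
    HiveConf conf = new HiveConf();
    SessionState.start(conf);
    // The Context is built from the same configuration that backs the session.
    ctx = new Context(conf);
  }
}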