Usage of org.apache.hadoop.hive.ql.wm.ExecutionTrigger in the Apache Hive project:
class TestTriggersTezSessionPoolManager, method testTriggerCustomReadOps.
@Test(timeout = 60000)
public void testTriggerCustomReadOps() throws Exception {
  // Kill the query once it issues more than 50 HDFS read operations.
  Trigger killTrigger = new ExecutionTrigger("high_read_ops",
      ExpressionFactory.fromString("HDFS_READ_OPS > 50"),
      new Action(Action.Type.KILL_QUERY));
  setupTriggers(Lists.newArrayList(killTrigger));
  // Self-join with sleep() keeps the query running long enough to trip the trigger.
  String sql = "select sleep(t1.under_col, 5), t1.value from " + tableName + " t1 join " + tableName + " t2 on t1.under_col>=t2.under_col";
  runQueryWithTrigger(sql, getConfigs(), killTrigger + " violated");
}
Usage of org.apache.hadoop.hive.ql.wm.ExecutionTrigger in the Apache Hive project:
class TestTriggersTezSessionPoolManager, method testTriggerCustomCreatedDynamicPartitionsUnionAll.
@Test(timeout = 60000)
public void testTriggerCustomCreatedDynamicPartitionsUnionAll() throws Exception {
  List<String> setupCmds = getConfigs();
  setupCmds.add("drop table src2");
  setupCmds.add("create table src2 (key int) partitioned by (value string)");
  // Dynamic-partition insert over a UNION ALL of two slices of the source table;
  // the trigger below cancels the query once more than 70 dynamic partitions
  // have been created.
  String sql = "insert overwrite table src2 partition (value) " + "select temps.* from (" + "select * from " + tableName + " where under_col < 100 " + "union all " + "select * from " + tableName + " where under_col >= 100 and under_col < 200) temps";
  Trigger killTrigger = new ExecutionTrigger("high_partitions",
      ExpressionFactory.fromString("CREATED_DYNAMIC_PARTITIONS > 70"),
      new Action(Action.Type.KILL_QUERY));
  setupTriggers(Lists.newArrayList(killTrigger));
  runQueryWithTrigger(sql, setupCmds, killTrigger + " violated");
}
Usage of org.apache.hadoop.hive.ql.wm.ExecutionTrigger in the Apache Hive project:
class TestTriggersTezSessionPoolManager, method testTriggerDagRawInputSplitsKill.
@Test(timeout = 60000)
public void testTriggerDagRawInputSplitsKill() throws Exception {
  // Map 1 and Map 3 each produce 55 raw input splits, so the DAG-wide count
  // exceeds the 100-split limit and the query is killed.
  Trigger killTrigger = new ExecutionTrigger("highly_parallel",
      ExpressionFactory.fromString("DAG_RAW_INPUT_SPLITS > 100"),
      new Action(Action.Type.KILL_QUERY));
  setupTriggers(Lists.newArrayList(killTrigger));
  String sql = "select t1.under_col, t1.value from " + tableName + " t1 join " + tableName + " t2 on t1.under_col>=t2.under_col";
  runQueryWithTrigger(sql, getConfigs(), "Query was cancelled");
}
Usage of org.apache.hadoop.hive.ql.wm.ExecutionTrigger in the Apache Hive project:
class TestTriggersTezSessionPoolManager, method testTriggerDefaultRawInputSplits.
@Test(timeout = 60000)
public void testTriggerDefaultRawInputSplits() throws Exception {
  // Map 1 and Map 3 each produce 55 raw input splits — already over the
  // per-vertex limit of 50, so the query is cancelled.
  Trigger killTrigger = new ExecutionTrigger("highly_parallel",
      ExpressionFactory.fromString("RAW_INPUT_SPLITS > 50"),
      new Action(Action.Type.KILL_QUERY));
  setupTriggers(Lists.newArrayList(killTrigger));
  String sql = "select t1.under_col, t1.value from " + tableName + " t1 join " + tableName + " t2 on t1.under_col>=t2.under_col";
  runQueryWithTrigger(sql, getConfigs(), "Query was cancelled");
}
Usage of org.apache.hadoop.hive.ql.wm.ExecutionTrigger in the Apache Hive project:
class TestTriggersTezSessionPoolManager, method testTriggerTotalTasks.
@Test(timeout = 60000)
public void testTriggerTotalTasks() throws Exception {
  // Kill the query when any vertex schedules more than 50 total tasks.
  Trigger killTrigger = new ExecutionTrigger("highly_parallel",
      ExpressionFactory.fromString("VERTEX_TOTAL_TASKS > 50"),
      new Action(Action.Type.KILL_QUERY));
  setupTriggers(Lists.newArrayList(killTrigger));
  // Self-join with sleep() keeps the query running long enough to trip the trigger.
  String sql = "select sleep(t1.under_col, 5), t1.value from " + tableName + " t1 join " + tableName + " t2 on t1.under_col>=t2.under_col";
  runQueryWithTrigger(sql, getConfigs(), killTrigger + " violated");
}
Aggregations