Use of org.apache.hadoop.hive.ql.wm.ExecutionTrigger in project hive by apache: class TestTriggersMoveWorkloadManager, method testTriggerMoveAndKill.
@Test(timeout = 60000)
public void testTriggerMoveAndKill() throws Exception {
  Expression moveExpression = ExpressionFactory.fromString("EXECUTION_TIME > 1sec");
  Expression killExpression = ExpressionFactory.fromString("EXECUTION_TIME > 5000ms");
  Trigger moveTrigger = new ExecutionTrigger("slow_query_move", moveExpression,
      new Action(Action.Type.MOVE_TO_POOL, "ETL"));
  Trigger killTrigger = new ExecutionTrigger("slow_query_kill", killExpression,
      new Action(Action.Type.KILL_QUERY));
  setupTriggers(Lists.newArrayList(moveTrigger), Lists.newArrayList(killTrigger));
  String query = "select sleep(t1.under_col, 5), t1.value from " + tableName + " t1 join " +
      tableName + " t2 on t1.under_col>=t2.under_col";
  List<String> setCmds = new ArrayList<>();
  setCmds.add("set hive.tez.session.events.print.summary=json");
  setCmds.add("set hive.exec.post.hooks=org.apache.hadoop.hive.ql.hooks.PostExecWMEventsSummaryPrinter");
  setCmds.add("set hive.exec.failure.hooks=org.apache.hadoop.hive.ql.hooks.PostExecWMEventsSummaryPrinter");
  List<String> errCaptureExpect = new ArrayList<>();
  errCaptureExpect.add("Workload Manager Events Summary");
  errCaptureExpect.add("Event: GET Pool: BI Cluster %: 80.00");
  errCaptureExpect.add("Event: MOVE Pool: ETL Cluster %: 20.00");
  errCaptureExpect.add("Event: KILL Pool: null Cluster %: 0.00");
  errCaptureExpect.add("Event: RETURN Pool: null Cluster %: 0.00");
  errCaptureExpect.add("\"eventType\" : \"GET\"");
  errCaptureExpect.add("\"eventType\" : \"MOVE\"");
  errCaptureExpect.add("\"eventType\" : \"KILL\"");
  errCaptureExpect.add("\"eventType\" : \"RETURN\"");
  errCaptureExpect.add("\"name\" : \"slow_query_move\"");
  errCaptureExpect.add("\"name\" : \"slow_query_kill\"");
  // violation in BI queue
  errCaptureExpect.add("\"violationMsg\" : \"Trigger " + moveTrigger + " violated");
  // violation in ETL queue
  errCaptureExpect.add("\"violationMsg\" : \"Trigger " + killTrigger + " violated");
  errCaptureExpect.add("\"subscribedCounters\" : [ \"EXECUTION_TIME\" ]");
  runQueryWithTrigger(query, setCmds, killTrigger + " violated", errCaptureExpect);
}
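Every trigger condition in this section is parsed from a plain string by ExpressionFactory.fromString. As a reference, the following minimal sketch (the wrapper class and main method are illustrative only) collects the expression strings used by these tests; EXECUTION_TIME appears both with an explicit time-unit suffix and as a bare number, and the bare form is presumably interpreted as milliseconds.

import org.apache.hadoop.hive.ql.wm.Expression;
import org.apache.hadoop.hive.ql.wm.ExpressionFactory;

// Expression strings used by the tests in this section, collected in one place for reference.
public class TriggerExpressionExamples {
  public static void main(String[] args) {
    Expression oneSecond = ExpressionFactory.fromString("EXECUTION_TIME > 1sec");     // unit suffix: seconds
    Expression fiveSeconds = ExpressionFactory.fromString("EXECUTION_TIME > 5000ms"); // unit suffix: milliseconds
    Expression bareNumber = ExpressionFactory.fromString("EXECUTION_TIME > 1000");    // bare value, presumably ms
    Expression bytesRead = ExpressionFactory.fromString("HDFS_BYTES_READ > 100");
    Expression totalTasks = ExpressionFactory.fromString("DAG_TOTAL_TASKS > 50");
    System.out.println(oneSecond + ", " + fiveSeconds + ", " + bareNumber + ", "
        + bytesRead + ", " + totalTasks);
  }
}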
Use of org.apache.hadoop.hive.ql.wm.ExecutionTrigger in project hive by apache: class TestTriggersMoveWorkloadManager, method testTriggerMoveConflictKill.
// TODO: disabling this test as tez publishes counters only after task completion which will cause write side counters
// to be not validated correctly (DAG will be completed before validation)
// @Test(timeout = 60000)
// public void testTriggerMoveKill() throws Exception {
//   Expression moveExpression1 = ExpressionFactory.fromString("HDFS_BYTES_READ > 100");
//   Expression moveExpression2 = ExpressionFactory.fromString("HDFS_BYTES_WRITTEN > 200");
//   Trigger moveTrigger1 = new ExecutionTrigger("move_big_read", moveExpression1,
//       new Action(Action.Type.MOVE_TO_POOL, "ETL"));
//   Trigger killTrigger = new ExecutionTrigger("big_write_kill", moveExpression2,
//       new Action(Action.Type.KILL_QUERY));
//   setupTriggers(Lists.newArrayList(moveTrigger1), Lists.newArrayList(killTrigger));
//   String query = "select t1.under_col, t1.value from " + tableName + " t1 join " + tableName +
//       " t2 on t1.under_col>=t2.under_col order by t1.under_col, t1.value";
//   List<String> setCmds = new ArrayList<>();
//   setCmds.add("set hive.tez.session.events.print.summary=json");
//   setCmds.add("set hive.exec.post.hooks=org.apache.hadoop.hive.ql.hooks.PostExecWMEventsSummaryPrinter");
//   setCmds.add("set hive.exec.failure.hooks=org.apache.hadoop.hive.ql.hooks.PostExecWMEventsSummaryPrinter");
//   List<String> errCaptureExpect = new ArrayList<>();
//   errCaptureExpect.add("Workload Manager Events Summary");
//   errCaptureExpect.add("Event: GET Pool: BI Cluster %: 80.00");
//   errCaptureExpect.add("Event: MOVE Pool: ETL Cluster %: 20.00");
//   errCaptureExpect.add("Event: KILL Pool: null Cluster %: 0.00");
//   errCaptureExpect.add("Event: RETURN Pool: null Cluster %: 0.00");
//   errCaptureExpect.add("\"eventType\" : \"GET\"");
//   errCaptureExpect.add("\"eventType\" : \"MOVE\"");
//   errCaptureExpect.add("\"eventType\" : \"KILL\"");
//   errCaptureExpect.add("\"eventType\" : \"RETURN\"");
//   errCaptureExpect.add("\"name\" : \"move_big_read\"");
//   errCaptureExpect.add("\"name\" : \"big_write_kill\"");
//   // violation in BI queue
//   errCaptureExpect.add("\"violationMsg\" : \"Trigger " + moveTrigger1 + " violated");
//   // violation in ETL queue
//   errCaptureExpect.add("\"violationMsg\" : \"Trigger " + killTrigger + " violated");
//   errCaptureExpect.add("\"subscribedCounters\" : [ \"HDFS_BYTES_READ\", \"HDFS_BYTES_WRITTEN\" ]");
//   runQueryWithTrigger(query, setCmds, killTrigger + " violated", errCaptureExpect);
// }
@Test(timeout = 60000)
public void testTriggerMoveConflictKill() throws Exception {
  Expression moveExpression = ExpressionFactory.fromString("HDFS_BYTES_READ > 100");
  Expression killExpression = ExpressionFactory.fromString("HDFS_BYTES_READ > 100");
  Trigger moveTrigger = new ExecutionTrigger("move_big_read", moveExpression,
      new Action(Action.Type.MOVE_TO_POOL, "ETL"));
  Trigger killTrigger = new ExecutionTrigger("kill_big_read", killExpression,
      new Action(Action.Type.KILL_QUERY));
  setupTriggers(Lists.newArrayList(moveTrigger, killTrigger), Lists.newArrayList());
  String query = "select sleep(t1.under_col, 5), t1.value from " + tableName + " t1 join " +
      tableName + " t2 on t1.under_col>=t2.under_col";
  List<String> setCmds = new ArrayList<>();
  setCmds.add("set hive.tez.session.events.print.summary=json");
  setCmds.add("set hive.exec.post.hooks=org.apache.hadoop.hive.ql.hooks.PostExecWMEventsSummaryPrinter");
  setCmds.add("set hive.exec.failure.hooks=org.apache.hadoop.hive.ql.hooks.PostExecWMEventsSummaryPrinter");
  List<String> errCaptureExpect = new ArrayList<>();
  errCaptureExpect.add("Workload Manager Events Summary");
  errCaptureExpect.add("Event: GET Pool: BI Cluster %: 80.00");
  errCaptureExpect.add("Event: KILL Pool: null Cluster %: 0.00");
  errCaptureExpect.add("Event: RETURN Pool: null Cluster %: 0.00");
  errCaptureExpect.add("\"eventType\" : \"GET\"");
  errCaptureExpect.add("\"eventType\" : \"KILL\"");
  errCaptureExpect.add("\"eventType\" : \"RETURN\"");
  errCaptureExpect.add("\"name\" : \"move_big_read\"");
  errCaptureExpect.add("\"name\" : \"kill_big_read\"");
  // violation in BI queue
  errCaptureExpect.add("\"violationMsg\" : \"Trigger " + killTrigger + " violated");
  errCaptureExpect.add("\"subscribedCounters\" : [ \"HDFS_BYTES_READ\" ]");
  runQueryWithTrigger(query, setCmds, killTrigger + " violated", errCaptureExpect);
}
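The two active tests above differ mainly in how the triggers are registered: testTriggerMoveAndKill places the MOVE trigger and the KILL trigger in separate trigger lists, so the query is moved out of the BI pool and then killed in the ETL pool (GET, MOVE, KILL, RETURN events), while testTriggerMoveConflictKill attaches both triggers to the same pool and asserts that only GET, KILL, and RETURN events appear, i.e. the KILL action wins the conflict. The following toy model (not Hive code; the class, enum, and logic are illustrative only) simply restates that asserted resolution rule:

import java.util.Arrays;
import java.util.List;

// Toy model of the conflict resolution asserted by testTriggerMoveConflictKill:
// when a MOVE trigger and a KILL trigger fire for the same query in the same pool,
// the KILL action is the one that takes effect (no MOVE event is expected).
public class TriggerConflictSketch {

  enum ActionType { MOVE_TO_POOL, KILL_QUERY }

  static ActionType resolve(List<ActionType> firedActions) {
    // The destructive KILL wins over MOVE when both fire on the same query.
    return firedActions.contains(ActionType.KILL_QUERY) ? ActionType.KILL_QUERY : ActionType.MOVE_TO_POOL;
  }

  public static void main(String[] args) {
    List<ActionType> fired = Arrays.asList(ActionType.MOVE_TO_POOL, ActionType.KILL_QUERY);
    System.out.println(resolve(fired)); // prints KILL_QUERY
  }
}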
Use of org.apache.hadoop.hive.ql.wm.ExecutionTrigger in project hive by apache: class TestTriggersNoTezSessionPool, method testTriggerDAGTotalTasks.
@Test(timeout = 60000)
public void testTriggerDAGTotalTasks() throws Exception {
  Expression expression = ExpressionFactory.fromString("DAG_TOTAL_TASKS > 50");
  Trigger trigger = new ExecutionTrigger("highly_parallel", expression, new Action(Action.Type.KILL_QUERY));
  setupTriggers(Lists.newArrayList(trigger));
  String query = "select sleep(t1.under_col, 5), t1.value from " + tableName + " t1 join " +
      tableName + " t2 on t1.under_col>=t2.under_col";
  runQueryWithTrigger(query, getConfigs(), trigger + " violated");
}
Use of org.apache.hadoop.hive.ql.wm.ExecutionTrigger in project hive by apache: class TestTriggersNoTezSessionPool, method testTriggerSlowQueryExecutionTime.
@Test(timeout = 60000)
public void testTriggerSlowQueryExecutionTime() throws Exception {
  Expression expression = ExpressionFactory.fromString("EXECUTION_TIME > 1000");
  Trigger trigger = new ExecutionTrigger("slow_query", expression, new Action(Action.Type.KILL_QUERY));
  setupTriggers(Lists.newArrayList(trigger));
  String query = "select sleep(t1.under_col, 5), t1.value from " + tableName + " t1 join " +
      tableName + " t2 on t1.under_col>=t2.under_col";
  runQueryWithTrigger(query, null, trigger + " violated");
}
Use of org.apache.hadoop.hive.ql.wm.ExecutionTrigger in project hive by apache: class TestTriggersTezSessionPoolManager, method testTriggerSlowQueryExecutionTime.
@Test(timeout = 60000)
public void testTriggerSlowQueryExecutionTime() throws Exception {
  Expression expression = ExpressionFactory.fromString("EXECUTION_TIME > 1000");
  Trigger trigger = new ExecutionTrigger("slow_query", expression, new Action(Action.Type.KILL_QUERY));
  setupTriggers(Lists.newArrayList(trigger));
  String query = "select sleep(t1.under_col, 5), t1.value from " + tableName + " t1 join " +
      tableName + " t2 on t1.under_col>=t2.under_col";
  runQueryWithTrigger(query, null, trigger + " violated");
}
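Every trigger in this section uses one of just two Action shapes: a kill action built from the action type alone, and a move action that also names a destination pool. A minimal standalone sketch of the two constructor calls (the wrapper class and the printout are illustrative only and not part of the tests):

import org.apache.hadoop.hive.ql.wm.Action;

// The two Action shapes used by the triggers above.
public class ActionExamples {
  public static void main(String[] args) {
    Action kill = new Action(Action.Type.KILL_QUERY);          // terminate the offending query
    Action move = new Action(Action.Type.MOVE_TO_POOL, "ETL"); // move the query to the "ETL" pool
    System.out.println(kill + " / " + move);
  }
}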