Example usage of org.apache.hadoop.hive.ql.exec.repl.bootstrap.AddDependencyToLeaves in the Apache Hive project.
From the class LoadPartitions, method createTableReplLogTask:
private void createTableReplLogTask() throws SemanticException {
  // Work item that records (via replLogger) that this table's bootstrap load finished.
  ReplStateLogWork logWork =
      new ReplStateLogWork(replLogger, tableDesc.getTableName(), tableDesc.tableType());
  Task<ReplStateLogWork> logTask = TaskFactory.get(logWork, context.hiveConf);

  // No load tasks yet: the log task is the only task for this table.
  if (tracker.tasks().isEmpty()) {
    tracker.addTask(logTask);
    return;
  }

  // Otherwise hang the log task off every leaf of the existing DAG so it runs last,
  // then let the tracker account for the newly added task.
  DAGTraversal.traverse(tracker.tasks(), new AddDependencyToLeaves(logTask));
  tracker.updateTaskCount(logTask, new ArrayList<Task<? extends Serializable>>());
}
Example usage of org.apache.hadoop.hive.ql.exec.repl.bootstrap.AddDependencyToLeaves in the Apache Hive project.
From the class LoadFunction, method createFunctionReplLogTask:
private void createFunctionReplLogTask(List<Task<? extends Serializable>> functionTasks, String functionName) {
  // Log work recording that the bootstrap load of this function completed.
  ReplStateLogWork logWork = new ReplStateLogWork(replLogger, functionName);
  Task<ReplStateLogWork> logTask = TaskFactory.get(logWork);
  // Attach the log task after every leaf of the function-load DAG so it executes last.
  DAGTraversal.traverse(functionTasks, new AddDependencyToLeaves(logTask));
}
Example usage of org.apache.hadoop.hive.ql.exec.repl.bootstrap.AddDependencyToLeaves in the Apache Hive project.
From the class LoadTable, method createTableReplLogTask:
private void createTableReplLogTask(String tableName, TableType tableType) throws SemanticException {
  // Work item that records (via replLogger) that this table's bootstrap load finished.
  ReplStateLogWork replLogWork = new ReplStateLogWork(replLogger, tableName, tableType);
  Task<ReplStateLogWork> replLogTask = TaskFactory.get(replLogWork);
  // BUG FIX: the original called DAGTraversal.traverse(...) unconditionally here AND
  // again in the else branch, wiring the log task to every leaf twice when the tracker
  // already had tasks. Traverse only once, inside the else branch, matching the
  // equivalent LoadPartitions.createTableReplLogTask implementation.
  if (tracker.tasks().isEmpty()) {
    // No load tasks for this table: the log task stands alone.
    tracker.addTask(replLogTask);
  } else {
    // Hang the log task off every leaf of the existing DAG so it runs last.
    DAGTraversal.traverse(tracker.tasks(), new AddDependencyToLeaves(replLogTask));
    List<Task<? extends Serializable>> visited = new ArrayList<>();
    tracker.updateTaskCount(replLogTask, visited);
  }
}
Aggregations