Use of org.apache.hadoop.hive.ql.parse.repl.load.UpdatedMetaDataTracker in project hive by apache.
The addUpdateReplStateTasks method of the IncrementalLoadTasksBuilder class.
private List<Task<?>> addUpdateReplStateTasks(UpdatedMetaDataTracker updatedMetaDataTracker,
    List<Task<?>> importTasks) throws SemanticException {
  // If no import tasks were generated by the event, then no object needs its repl state updated.
  if (importTasks.isEmpty()) {
    log.debug("No objects need update of repl state: 0 import tasks");
    return importTasks;
  }

  // Create a barrier task for dependency collection of the import tasks.
  Task<?> barrierTask = TaskFactory.get(new DependencyCollectionWork(), conf);
  List<Task<?>> tasks = new ArrayList<>();
  Task<?> updateReplIdTask;

  // Track, per database, the highest repl state (event id) seen across the updated objects.
  HashMap<String, Integer> dbS = new HashMap<>();
  for (UpdatedMetaDataTracker.UpdateMetaData updateMetaData : updatedMetaDataTracker.getUpdateMetaDataList()) {
    String replState = updateMetaData.getReplState();
    String dbName = updateMetaData.getDbName();
    if (dbS.get(dbName) != null) {
      if (Integer.parseInt(replState) > dbS.get(dbName)) {
        dbS.put(dbName, Integer.parseInt(replState));
      }
    } else {
      dbS.put(dbName, Integer.parseInt(replState));
    }
  }
  // Schedule one repl state update task per database, carrying the max event id collected above.
  for (Map.Entry<String, Integer> entry : dbS.entrySet()) {
    updateReplIdTask = dbUpdateReplStateTask(entry.getKey(), String.valueOf(entry.getValue()), barrierTask);
    tasks.add(updateReplIdTask);
  }
  if (tasks.isEmpty()) {
    log.debug("No objects need update of repl state: 0 update tracker tasks");
    return importTasks;
  }

  // Link the import tasks to the barrier task, which will in turn be linked with the repl state update tasks.
  DAGTraversal.traverse(importTasks, new AddDependencyToLeaves(barrierTask));

  // At least one task would have been added to update the repl state.
  return tasks;
}
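
For context, the first loop collapses the tracker's updated-object list to a single maximum replication state (event id) per database, so that only one update task is scheduled per database. Below is a minimal standalone sketch of that reduction; the UpdateInfo class and the sample values are hypothetical stand-ins for UpdatedMetaDataTracker.UpdateMetaData and are not part of the Hive API.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class MaxReplStatePerDbSketch {

  // Hypothetical stand-in for UpdatedMetaDataTracker.UpdateMetaData: only the fields used above.
  static class UpdateInfo {
    final String dbName;
    final String replState; // event id, kept as a String as in the builder code

    UpdateInfo(String dbName, String replState) {
      this.dbName = dbName;
      this.replState = replState;
    }
  }

  // Mirrors the first loop of addUpdateReplStateTasks: keep the highest event id seen per database.
  static Map<String, Integer> maxReplStatePerDb(List<UpdateInfo> updates) {
    Map<String, Integer> maxPerDb = new HashMap<>();
    for (UpdateInfo update : updates) {
      int eventId = Integer.parseInt(update.replState);
      maxPerDb.merge(update.dbName, eventId, Math::max);
    }
    return maxPerDb;
  }

  public static void main(String[] args) {
    List<UpdateInfo> updates = List.of(
        new UpdateInfo("sales", "101"),
        new UpdateInfo("sales", "105"),
        new UpdateInfo("hr", "99"));
    // Prints {hr=99, sales=105}: one repl state update would be scheduled per database.
    System.out.println(maxReplStatePerDb(updates));
  }
}

Map.merge expresses the same max-per-key reduction that the builder writes out with explicit get/put calls.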