Search in sources :

Example 1 with DependencyKey

Use of org.apache.hadoop.hive.ql.hooks.LineageInfo.DependencyKey in the project hive by apache.

The class PostExecutePrinter's method run.

/**
 * Post-execution hook: echoes the query text and type, the read/write
 * entities, and any captured column lineage to the session console.
 * Output goes through {@code printError} so it lands on the error stream
 * (where Hive's test harness collects POSTHOOK lines).
 *
 * @param queryState state of the query that just finished; may be null
 * @param inputs     entities the query read
 * @param outputs    entities the query wrote
 * @param linfo      generic lineage information, or null if none was collected
 * @param ugi        user who ran the query (not used by this printer)
 * @throws Exception if printing fails
 */
public void run(QueryState queryState, Set<ReadEntity> inputs, Set<WriteEntity> outputs, LineageInfo linfo, UserGroupInformation ugi) throws Exception {
    LogHelper console = SessionState.getConsole();
    // No console attached to this session — nothing to print to.
    if (console == null) {
        return;
    }
    if (queryState != null) {
        console.printError("POSTHOOK: query: " + queryState.getQueryString().trim());
        console.printError("POSTHOOK: type: " + queryState.getCommandType());
    }
    PreExecutePrinter.printEntities(console, inputs, "POSTHOOK: Input: ");
    PreExecutePrinter.printEntities(console, outputs, "POSTHOOK: Output: ");
    // Also print out the generic lineage information if there is any.
    if (linfo == null) {
        return;
    }
    // Sort the dependency entries so the lineage output is deterministic.
    LinkedList<Map.Entry<DependencyKey, Dependency>> sortedEntries = new LinkedList<Map.Entry<DependencyKey, Dependency>>(linfo.entrySet());
    Collections.sort(sortedEntries, new DependencyKeyComp());
    for (Map.Entry<DependencyKey, Dependency> entry : sortedEntries) {
        Dependency dependency = entry.getValue();
        DependencyKey key = entry.getKey();
        if (dependency == null) {
            continue;
        }
        StringBuilder line = new StringBuilder("POSTHOOK: Lineage: ");
        if (key.getDataContainer().isPartition()) {
            // Partitioned target: render as "table PARTITION(k1=v1,k2=v2,...)".
            Partition part = key.getDataContainer().getPartition();
            line.append(part.getTableName()).append(" PARTITION(");
            int idx = 0;
            for (FieldSchema fs : key.getDataContainer().getTable().getPartitionKeys()) {
                if (idx != 0) {
                    line.append(",");
                }
                // Partition key names come from the table schema; the values
                // come (position-aligned) from the partition itself.
                line.append(fs.getName()).append("=").append(part.getValues().get(idx++));
            }
            line.append(")");
        } else {
            line.append(key.getDataContainer().getTable().getTableName());
        }
        line.append(".").append(key.getFieldSchema().getName()).append(" ").append(dependency.getType()).append(" ");
        // List every base column this target column derives from.
        line.append("[");
        for (BaseColumnInfo col : dependency.getBaseCols()) {
            line.append("(").append(col.getTabAlias().getTable().getTableName()).append(")").append(col.getTabAlias().getAlias()).append(".").append(col.getColumn()).append(", ");
        }
        line.append("]");
        console.printError(line.toString());
    }
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) LogHelper(org.apache.hadoop.hive.ql.session.SessionState.LogHelper) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) DependencyKey(org.apache.hadoop.hive.ql.hooks.LineageInfo.DependencyKey) Dependency(org.apache.hadoop.hive.ql.hooks.LineageInfo.Dependency) LinkedList(java.util.LinkedList) BaseColumnInfo(org.apache.hadoop.hive.ql.hooks.LineageInfo.BaseColumnInfo) Map(java.util.Map)

Aggregations

LinkedList (java.util.LinkedList)1 Map (java.util.Map)1 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)1 Partition (org.apache.hadoop.hive.metastore.api.Partition)1 BaseColumnInfo (org.apache.hadoop.hive.ql.hooks.LineageInfo.BaseColumnInfo)1 Dependency (org.apache.hadoop.hive.ql.hooks.LineageInfo.Dependency)1 DependencyKey (org.apache.hadoop.hive.ql.hooks.LineageInfo.DependencyKey)1 LogHelper (org.apache.hadoop.hive.ql.session.SessionState.LogHelper)1