Usage example of org.apache.spark.Dependency in the Apache Hive project:
the SparkUtilities class, method rddToString.
/**
 * Recursively renders the lineage tree rooted at {@code rdd} into {@code sb},
 * one RDD per line. Each line shows the RDD's canonical class name and identity
 * hash, tagged with {@code (cached)} when its storage level keeps data in
 * memory; children are indented one extra tab per level.
 *
 * @param rdd    root of the lineage subtree to render
 * @param sb     builder the textual tree is appended to
 * @param offset leading indentation for this level (tabs accumulated so far)
 */
private static void rddToString(RDD rdd, StringBuilder sb, String offset) {
  sb.append(offset)
      .append(rdd.getClass().getCanonicalName())
      .append("[")
      .append(rdd.hashCode())
      .append("]");
  if (rdd.getStorageLevel().useMemory()) {
    sb.append("(cached)");
  }
  sb.append("\n");

  // Indentation used for all children of this node.
  String childOffset = offset + "\t";

  Collection<Dependency> deps = JavaConversions.asJavaCollection(rdd.dependencies());
  if (deps != null) {
    // Normal case: walk each parent reachable through the dependency list.
    for (Dependency dep : deps) {
      rddToString(dep.rdd(), sb, childOffset);
    }
  } else if (rdd instanceof UnionRDD) {
    // Fallback: a UnionRDD exposes its parents via rdds() — NOTE(review):
    // presumably this branch covers RDDs whose dependencies() yields nothing
    // usable; confirm against the Spark API in use.
    for (RDD parent : JavaConversions.asJavaCollection(((UnionRDD) rdd).rdds())) {
      rddToString(parent, sb, childOffset);
    }
  }
}
Aggregations