Use of org.apache.hadoop.classification.InterfaceAudience.Private in project tez by apache:
class TezConverterUtils, method getURIFromYarnURL.
/**
 * Converts a YARN {@link URL} record into a normalized {@link URI}.
 *
 * <p>A missing scheme or host is treated as empty; user info and a positive
 * port are folded into the authority component when present.
 *
 * @param url the YARN URL to convert
 * @return the normalized URI equivalent of {@code url}
 * @throws URISyntaxException if the URL components do not form a valid URI
 */
@Private
public static URI getURIFromYarnURL(URL url) throws URISyntaxException {
  final String scheme = (url.getScheme() != null) ? url.getScheme() : "";
  StringBuilder authority = new StringBuilder();
  String host = url.getHost();
  if (host != null) {
    if (url.getUserInfo() != null) {
      authority.append(url.getUserInfo()).append('@');
    }
    authority.append(host);
    // Only a positive port is meaningful; YARN uses -1 for "unset".
    if (url.getPort() > 0) {
      authority.append(':').append(url.getPort());
    }
  }
  return new URI(scheme, authority.toString(), url.getFile(), null, null).normalize();
}
Use of org.apache.hadoop.classification.InterfaceAudience.Private in project tez by apache:
class VertexImpl, method constructFinalFullcounters.
/**
 * Builds the vertex's final aggregated counters and statistics.
 *
 * <p>Starts from the vertex-level {@code counters} field, then folds in the
 * counters and task reports of every task belonging to this vertex.
 */
@Private
public void constructFinalFullcounters() {
  this.fullCounters = new TezCounters();
  // Seed with the vertex-level counters field before adding per-task counters.
  this.fullCounters.incrAllCounters(counters);
  this.vertexStats = new VertexStats();
  for (Task task : this.tasks.values()) {
    vertexStats.updateStats(task.getReport());
    // Renamed from "counters" to avoid shadowing the field read above.
    TezCounters taskCounters = task.getCounters();
    this.fullCounters.incrAllCounters(taskCounters);
  }
}
Use of org.apache.hadoop.classification.InterfaceAudience.Private in project tez by apache:
class DAGImpl, method constructFinalFullcounters.
/**
 * Builds the DAG's final aggregated counters: the DAG-level counters plus
 * the full counters of every vertex in the DAG.
 */
@Private
public void constructFinalFullcounters() {
  // Assign the field first, as the original did, then accumulate into it.
  this.fullCounters = new TezCounters();
  this.fullCounters.incrAllCounters(dagCounters);
  this.vertices.values().forEach(v -> this.fullCounters.incrAllCounters(v.getAllCounters()));
}
Use of org.apache.hadoop.classification.InterfaceAudience.Private in project tez by apache:
class TezUtilsInternal, method setSecurityUtilConfigration.
/**
 * Invokes {@code SecurityUtil.setConfiguration(Configuration)} reflectively.
 *
 * <p>The method only exists from Hadoop 2.9.0 onwards (Hadoop 2.6.0 lacks
 * it), so older releases are detected via {@link NoSuchMethodException} and
 * silently skipped. Remove the reflection once the minimum supported Hadoop
 * version provides the method directly.
 *
 * @param log logger used to report invocation failures
 * @param conf configuration to hand to SecurityUtil
 * @throws TezUncheckedException if the reflective invocation itself fails
 */
@Private
public static void setSecurityUtilConfigration(Logger log, Configuration conf) {
  try {
    Method setConfiguration = SecurityUtil.class.getMethod("setConfiguration", Configuration.class);
    setConfiguration.invoke(null, conf);
  } catch (NoSuchMethodException ignored) {
    // Running against an older Hadoop release; nothing to configure.
  } catch (SecurityException | IllegalAccessException | IllegalArgumentException
      | InvocationTargetException e) {
    log.warn("Error invoking SecurityUtil.setConfiguration: ", e);
    throw new TezUncheckedException("Error invoking SecurityUtil.setConfiguration", e);
  }
}
Use of org.apache.hadoop.classification.InterfaceAudience.Private in project hive by apache:
class TezTask, method collectCommitInformation.
/**
 * Collects Iceberg commit metadata for every vertex that wrote through the
 * Iceberg no-job output committer, and records it via
 * {@code SessionStateUtil.addCommitInfo} so the commit phase can use it.
 *
 * @param work the Tez work graph whose vertices are inspected
 * @throws IOException if fetching a vertex status fails
 * @throws TezException if the Tez DAG client rejects the status request
 */
private void collectCommitInformation(TezWork work) throws IOException, TezException {
  for (BaseWork w : work.getAllWork()) {
    JobConf jobConf = workToConf.get(w);
    Vertex vertex = workToVertex.get(w);
    // Optional.ofNullable makes a missing jobConf simply read as "no committer".
    boolean hasIcebergCommitter = Optional.ofNullable(jobConf).map(JobConf::getOutputCommitter).map(Object::getClass).map(Class::getName).filter(name -> name.endsWith("HiveIcebergNoJobCommitter")).isPresent();
    // we should only consider jobs with Iceberg output committer and a data sink
    // NOTE(review): vertex from workToVertex.get(w) is assumed non-null here — verify.
    if (hasIcebergCommitter && !vertex.getDataSinks().isEmpty()) {
      VertexStatus status = dagClient.getVertexStatus(vertex.getName(), EnumSet.of(StatusGetOpts.GET_COUNTERS));
      String[] jobIdParts = status.getId().split("_");
      // status.getId() returns something like: vertex_1617722404520_0001_1_00
      // this should be transformed to a parsable JobID: job_16177224045200_0001
      int vertexId = Integer.parseInt(jobIdParts[jobIdParts.length - 1]);
      // jobIdParts[1] = cluster timestamp, jobIdParts[2] = app sequence number;
      // the vertex id is folded in so each vertex yields a distinct JobID.
      String jobId = String.format(JOB_ID_TEMPLATE, jobIdParts[1], vertexId, jobIdParts[2]);
      List<String> tables = new ArrayList<>();
      Map<String, String> icebergProperties = new HashMap<>();
      // Scan every jobConf entry once, splitting it into target tables vs.
      // Iceberg properties by key prefix.
      for (Map.Entry<String, String> entry : jobConf) {
        if (entry.getKey().startsWith(ICEBERG_SERIALIZED_TABLE_PREFIX)) {
          // get all target tables this vertex wrote to
          tables.add(entry.getKey().substring(ICEBERG_SERIALIZED_TABLE_PREFIX.length()));
        } else if (entry.getKey().startsWith(ICEBERG_PROPERTY_PREFIX)) {
          // find iceberg props in jobConf as they can be needed, but not available, during job commit
          icebergProperties.put(entry.getKey(), entry.getValue());
        }
      }
      // save information for each target table
      tables.forEach(table -> SessionStateUtil.addCommitInfo(jobConf, table, jobId, status.getProgress().getSucceededTaskCount(), icebergProperties));
    }
  }
}
Aggregations