Usage example of org.apache.hadoop.classification.InterfaceAudience.Private in the Apache Hadoop project.
From the class AMRMTokenSecretManager, method rollMasterKey.
@Private
void rollMasterKey() {
  this.writeLock.lock();
  try {
    LOG.info("Rolling master-key for amrm-tokens");
    // Stage the replacement key; it only becomes the current key once the
    // NextKeyActivator task fires after the configured activation delay.
    this.nextMasterKey = createNewMasterKey();
    AMRMTokenSecretManagerState stateToStore =
        AMRMTokenSecretManagerState.newInstance(
            this.currentMasterKey.getMasterKey(),
            this.nextMasterKey.getMasterKey());
    // Persist both keys before scheduling activation, so a restarted RM can
    // still validate tokens signed with either key.
    rmContext.getStateStore().storeOrUpdateAMRMTokenSecretManager(stateToStore, true);
    this.timer.schedule(new NextKeyActivator(), this.activationDelay);
  } finally {
    this.writeLock.unlock();
  }
}
Usage example of org.apache.hadoop.classification.InterfaceAudience.Private in the Apache Hadoop project.
From the class AMRMTokenSecretManager, method createPassword.
@Override
@Private
protected byte[] createPassword(AMRMTokenIdentifier identifier) {
  // Read lock: password creation may race with a master-key roll, which
  // takes the write lock.
  this.readLock.lock();
  try {
    ApplicationAttemptId attemptId = identifier.getApplicationAttemptId();
    LOG.info("Creating password for " + attemptId);
    // Sign the serialized identifier with the currently active master key.
    return createPassword(identifier.getBytes(), getMasterKey().getSecretKey());
  } finally {
    this.readLock.unlock();
  }
}
Usage example of org.apache.hadoop.classification.InterfaceAudience.Private in the Apache Hadoop project.
From the class RMProxy, method createRMProxy.
/**
 * Creates a proxy for the given protocol; currently used by NodeManagers
 * only. Without HA this is a direct connection to the ResourceManager
 * address; with HA enabled the returned proxy transparently fails over
 * between ResourceManagers according to the computed retry policy.
 */
@Private
protected static <T> T createRMProxy(final Configuration configuration, final Class<T> protocol, RMProxy instance, final long retryTime, final long retryInterval) throws IOException {
  // Reuse the caller's YarnConfiguration when possible; otherwise wrap the
  // plain Configuration so YARN-specific keys resolve correctly.
  final YarnConfiguration conf;
  if (configuration instanceof YarnConfiguration) {
    conf = (YarnConfiguration) configuration;
  } else {
    conf = new YarnConfiguration(configuration);
  }
  final boolean haEnabled = HAUtil.isHAEnabled(conf);
  final RetryPolicy retryPolicy = createRetryPolicy(conf, retryTime, retryInterval, haEnabled);
  return newProxyInstance(conf, protocol, instance, retryPolicy);
}
Usage example of org.apache.hadoop.classification.InterfaceAudience.Private in the Apache Hadoop project.
From the class LogCLIHelpers, method printAContainerLogMetadata.
@Private
public int printAContainerLogMetadata(ContainerLogsRequest options, PrintStream out, PrintStream err) throws IOException {
  ApplicationId appId = options.getAppId();
  String owner = options.getAppOwner();
  String requestedNode = options.getNodeId();
  String requestedContainer = options.getContainerId();

  // Fetch per-container log metadata from the aggregated-log filesystem;
  // surface any failure on the error stream and signal failure to the caller.
  List<ContainerLogMeta> metaList;
  try {
    metaList = LogToolUtils.getContainerLogMetaFromRemoteFS(conf, appId, requestedContainer, requestedNode, owner);
  } catch (Exception ex) {
    err.println(ex.getMessage());
    return -1;
  }

  // Nothing matched: report the most specific message for the filters given.
  if (metaList.isEmpty()) {
    if (requestedNode != null) {
      if (requestedContainer != null) {
        err.println("The container " + requestedContainer + " couldn't be found " + "on the node specified: " + requestedNode);
      } else {
        err.println("Can not find log metadata for any containers on " + requestedNode);
      }
    } else if (requestedContainer != null) {
      err.println("Can not find log metadata for container: " + requestedContainer);
    }
    return -1;
  }

  // Print one header + table of log files per container.
  for (ContainerLogMeta meta : metaList) {
    String header = String.format(CONTAINER_ON_NODE_PATTERN, meta.getContainerId(), meta.getNodeId());
    out.println(header);
    out.println(StringUtils.repeat("=", header.length()));
    out.printf(PER_LOG_FILE_INFO_PATTERN, "LogFile", "LogLength", "LastModificationTime", "LogAggregationType");
    out.println(StringUtils.repeat("=", header.length() * 2));
    for (PerContainerLogFileInfo fileInfo : meta.getContainerLogMeta()) {
      out.printf(PER_LOG_FILE_INFO_PATTERN, fileInfo.getFileName(), fileInfo.getFileSize(), fileInfo.getLastModifiedTime(), "AGGREGATED");
    }
  }
  return 0;
}
Usage example of org.apache.hadoop.classification.InterfaceAudience.Private in the Apache Hadoop project.
From the class LogCLIHelpers, method createPrintStream.
@Private
public PrintStream createPrintStream(String localDir, String nodeId, String containerId) throws IOException {
  // No local directory configured: write to stdout.
  if (localDir == null || localDir.isEmpty()) {
    return System.out;
  }
  // Otherwise write to <localDir>/<node>/<containerId>, creating the
  // per-node directory (and any missing parents) first.
  Path nodeDir = new Path(localDir, LogAggregationUtils.getNodeString(nodeId));
  Files.createDirectories(Paths.get(nodeDir.toString()));
  Path containerLogFile = new Path(nodeDir, containerId);
  return new PrintStream(containerLogFile.toString(), "UTF-8");
}
Aggregations