Use of org.apache.hadoop.yarn.api.records.timeline.TimelineDomain in project hadoop by apache.
From the class TimelineStoreTestUtils, method testGetDomain.
public void testGetDomain() throws IOException {
  TimelineDomain actualDomain1 = store.getDomain(domain1.getId());
  verifyDomainInfo(domain1, actualDomain1);
  assertTrue(actualDomain1.getCreatedTime() > 0);
  assertTrue(actualDomain1.getModifiedTime() > 0);
  assertEquals(actualDomain1.getCreatedTime(), actualDomain1.getModifiedTime());
  TimelineDomain actualDomain2 = store.getDomain(domain2.getId());
  verifyDomainInfo(domain2, actualDomain2);
  assertEquals("domain_id_2", actualDomain2.getId());
  assertTrue(actualDomain2.getCreatedTime() > 0);
  assertTrue(actualDomain2.getModifiedTime() > 0);
  assertTrue(actualDomain2.getCreatedTime() < actualDomain2.getModifiedTime());
}
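The test assumes domain1 and domain2 were written to the store beforehand, typically in a setup method. A minimal sketch of such a fixture, using the TimelineDomain setters shown elsewhere on this page; the field values and the store.put(domain) writer call are assumptions, not the project's actual setup code:

  // Hypothetical fixture sketch: build a domain and hand it to the store under test.
  // Values are illustrative; put(TimelineDomain) is assumed to be the store's writer method.
  TimelineDomain domain2 = new TimelineDomain();
  domain2.setId("domain_id_2");                        // matches the id asserted in the test
  domain2.setDescription("description_2");
  domain2.setOwner("owner_2");
  domain2.setReaders("reader_user_2 reader_group_2");
  domain2.setWriters("writer_user_2 writer_group_2");
  store.put(domain2);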
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineDomain in project hadoop by apache.
From the class KeyValueBasedTimelineStore, method getDomains.
@Override
public TimelineDomains getDomains(String owner) throws IOException {
  if (getServiceStopped()) {
    LOG.info("Service stopped, return null for the storage");
    return null;
  }
  List<TimelineDomain> domains = new ArrayList<TimelineDomain>();
  Set<TimelineDomain> domainsOfOneOwner = domainsByOwner.get(owner);
  if (domainsOfOneOwner == null) {
    return new TimelineDomains();
  }
  for (TimelineDomain domain : domainsByOwner.get(owner)) {
    TimelineDomain domainToReturn = KeyValueBasedTimelineStoreUtils.createTimelineDomain(
        domain.getId(), domain.getDescription(), domain.getOwner(),
        domain.getReaders(), domain.getWriters(),
        domain.getCreatedTime(), domain.getModifiedTime());
    domains.add(domainToReturn);
  }
  Collections.sort(domains, new Comparator<TimelineDomain>() {
    @Override
    public int compare(TimelineDomain domain1, TimelineDomain domain2) {
      int result = domain2.getCreatedTime().compareTo(domain1.getCreatedTime());
      if (result == 0) {
        return domain2.getModifiedTime().compareTo(domain1.getModifiedTime());
      } else {
        return result;
      }
    }
  });
  TimelineDomains domainsToReturn = new TimelineDomains();
  domainsToReturn.addDomains(domains);
  return domainsToReturn;
}
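The comparator orders the domains by creation time, newest first, breaking ties by modification time, newest first. On Java 8+ the same ordering can be expressed with comparator combinators; a behavior-equivalent sketch, assuming getCreatedTime() and getModifiedTime() return Long as the compareTo calls above imply:

  // Sketch only: same ordering as the anonymous Comparator above, written with
  // Java 8 combinators (newest created first, ties broken by newest modified).
  Collections.sort(domains,
      Comparator.comparing(TimelineDomain::getCreatedTime, Comparator.<Long>reverseOrder())
          .thenComparing(TimelineDomain::getModifiedTime, Comparator.<Long>reverseOrder()));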
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineDomain in project hadoop by apache.
From the class RollingLevelDBTimelineStore, method getTimelineDomain.
private static TimelineDomain getTimelineDomain(DBIterator iterator, String domainId,
    byte[] prefix) throws IOException {
  // Iterate over all the rows whose key starts with prefix to retrieve the
  // domain information.
  TimelineDomain domain = new TimelineDomain();
  domain.setId(domainId);
  boolean noRows = true;
  for (; iterator.hasNext(); iterator.next()) {
    byte[] key = iterator.peekNext().getKey();
    if (!prefixMatches(prefix, prefix.length, key)) {
      break;
    }
    if (noRows) {
      noRows = false;
    }
    byte[] value = iterator.peekNext().getValue();
    if (value != null && value.length > 0) {
      if (key[prefix.length] == DESCRIPTION_COLUMN[0]) {
        domain.setDescription(new String(value, UTF_8));
      } else if (key[prefix.length] == OWNER_COLUMN[0]) {
        domain.setOwner(new String(value, UTF_8));
      } else if (key[prefix.length] == READER_COLUMN[0]) {
        domain.setReaders(new String(value, UTF_8));
      } else if (key[prefix.length] == WRITER_COLUMN[0]) {
        domain.setWriters(new String(value, UTF_8));
      } else if (key[prefix.length] == TIMESTAMP_COLUMN[0]) {
        domain.setCreatedTime(readReverseOrderedLong(value, 0));
        domain.setModifiedTime(readReverseOrderedLong(value, 8));
      } else {
        LOG.error("Unrecognized domain column: " + key[prefix.length]);
      }
    }
  }
  if (noRows) {
    return null;
  } else {
    return domain;
  }
}
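prefixMatches is a helper defined elsewhere in the store and not shown on this page; it decides when the iteration has left the key range of the requested domain. A hedged sketch of such a byte-prefix check (an assumption about the helper's shape, not its verbatim source), using WritableComparator.compareBytes from hadoop-common:

  // Assumed shape of the prefix check used above: true when the first prefixLen
  // bytes of key equal prefix, so iteration stops at the first key outside the domain.
  private static boolean prefixMatches(byte[] prefix, int prefixLen, byte[] key) {
    if (key.length < prefixLen) {
      return false;
    }
    return WritableComparator.compareBytes(prefix, 0, prefixLen, key, 0, prefixLen) == 0;
  }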
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineDomain in project hadoop by apache.
From the class Client, method prepareTimelineDomain.
private void prepareTimelineDomain() {
  TimelineClient timelineClient = null;
  if (conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED,
      YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ENABLED)) {
    timelineClient = TimelineClient.createTimelineClient();
    timelineClient.init(conf);
    timelineClient.start();
  } else {
    LOG.warn("Cannot put the domain " + domainId
        + " because the timeline service is not enabled");
    return;
  }
  try {
    // TODO: we need to check and combine the existing timeline domain ACLs,
    // but let's do it once we have client java library to query domains.
    TimelineDomain domain = new TimelineDomain();
    domain.setId(domainId);
    domain.setReaders(viewACLs != null && viewACLs.length() > 0 ? viewACLs : " ");
    domain.setWriters(modifyACLs != null && modifyACLs.length() > 0 ? modifyACLs : " ");
    timelineClient.putDomain(domain);
    LOG.info("Put the timeline domain: " + TimelineUtils.dumpTimelineRecordtoJSON(domain));
  } catch (Exception e) {
    LOG.error("Error when putting the timeline domain", e);
  } finally {
    timelineClient.stop();
  }
}
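prepareTimelineDomain only does anything when the timeline service is enabled in the client's configuration, and it reads domainId, viewACLs, and modifyACLs from the Client's fields, presumably populated from the --domain, --view_acls, and --modify_acls options. A minimal sketch of that precondition; the concrete values are illustrative and mirror the distributed-shell test arguments further down this page:

  // Illustrative setup only: enable the timeline service and supply the fields
  // the method reads; the values mirror the test arguments below, not production config.
  Configuration conf = new YarnConfiguration();
  conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
  String domainId = "TEST_DOMAIN";                   // --domain
  String viewACLs = "reader_user reader_group";      // --view_acls
  String modifyACLs = "writer_user writer_group";    // --modify_acls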
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineDomain in project hadoop by apache.
From the class TestDistributedShell, method testDSShell.
public void testDSShell(boolean haveDomain, boolean defaultFlow) throws Exception {
  String[] args = {
      "--jar", APPMASTER_JAR,
      "--num_containers", "2",
      "--shell_command", Shell.WINDOWS ? "dir" : "ls",
      "--master_memory", "512",
      "--master_vcores", "2",
      "--container_memory", "128",
      "--container_vcores", "1"
  };
  if (haveDomain) {
    String[] domainArgs = {
        "--domain", "TEST_DOMAIN",
        "--view_acls", "reader_user reader_group",
        "--modify_acls", "writer_user writer_group",
        "--create"
    };
    args = mergeArgs(args, domainArgs);
  }
  boolean isTestingTimelineV2 = false;
  if (timelineVersionWatcher.getTimelineVersion() == 2.0f) {
    isTestingTimelineV2 = true;
    if (!defaultFlow) {
      String[] flowArgs = {
          "--flow_name", "test_flow_name",
          "--flow_version", "test_flow_version",
          "--flow_run_id", "12345678"
      };
      args = mergeArgs(args, flowArgs);
    }
    LOG.info("Setup: Using timeline v2!");
  }
  LOG.info("Initializing DS Client");
  final Client client = new Client(new Configuration(yarnCluster.getConfig()));
  boolean initSuccess = client.init(args);
  Assert.assertTrue(initSuccess);
  LOG.info("Running DS Client");
  final AtomicBoolean result = new AtomicBoolean(false);
  Thread t = new Thread() {
    public void run() {
      try {
        result.set(client.run());
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
  };
  t.start();
  YarnClient yarnClient = YarnClient.createYarnClient();
  yarnClient.init(new Configuration(yarnCluster.getConfig()));
  yarnClient.start();
  String hostName = NetUtils.getHostname();
  boolean verified = false;
  String errorMessage = "";
  ApplicationId appId = null;
  ApplicationReport appReport = null;
  while (!verified) {
    List<ApplicationReport> apps = yarnClient.getApplications();
    if (apps.size() == 0) {
      Thread.sleep(10);
      continue;
    }
    appReport = apps.get(0);
    appId = appReport.getApplicationId();
    if (appReport.getHost().equals("N/A")) {
      Thread.sleep(10);
      continue;
    }
    errorMessage = "Expected host name to start with '" + hostName + "', was '"
        + appReport.getHost() + "'. Expected rpc port to be '-1', was '"
        + appReport.getRpcPort() + "'.";
    if (checkHostname(appReport.getHost()) && appReport.getRpcPort() == -1) {
      verified = true;
    }
    if (appReport.getYarnApplicationState() == YarnApplicationState.FINISHED
        && appReport.getFinalApplicationStatus() != FinalApplicationStatus.UNDEFINED) {
      break;
    }
  }
  Assert.assertTrue(errorMessage, verified);
  t.join();
  LOG.info("Client run completed for testDSShell. Result=" + result);
  Assert.assertTrue(result.get());
  if (timelineVersionWatcher.getTimelineVersion() == 1.5f) {
    long scanInterval = conf.getLong(
        YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_SCAN_INTERVAL_SECONDS,
        YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_SCAN_INTERVAL_SECONDS_DEFAULT);
    Path doneDir = new Path(
        YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_DONE_DIR_DEFAULT);
    // Wait till the data is moved to done dir, or timeout and fail
    while (true) {
      RemoteIterator<FileStatus> iterApps = fs.listStatusIterator(doneDir);
      if (iterApps.hasNext()) {
        break;
      }
      Thread.sleep(scanInterval * 2);
    }
  }
  TimelineDomain domain = null;
  if (!isTestingTimelineV2) {
    checkTimelineV1(haveDomain);
  } else {
    checkTimelineV2(haveDomain, appId, defaultFlow, appReport);
  }
}
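mergeArgs is a small test helper that appends the optional domain and flow arguments to the base argument list; it is not shown on this page. A hedged sketch of what such an array-concatenation helper could look like (an assumption, not the helper's verbatim source):

  // Assumed shape of the helper used above: returns a new array containing
  // args followed by moreArgs.
  private String[] mergeArgs(String[] args, String[] moreArgs) {
    String[] merged = new String[args.length + moreArgs.length];
    System.arraycopy(args, 0, merged, 0, args.length);
    System.arraycopy(moreArgs, 0, merged, args.length, moreArgs.length);
    return merged;
  }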