Use of org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager in project hadoop by apache.
In the class EntityGroupFSTimelineStore, the serviceStart method:
@Override
protected void serviceStart() throws Exception {
  super.serviceStart();
  LOG.info("Starting {}", getName());
  summaryStore.start();
  Configuration conf = getConfig();
  aclManager = new TimelineACLsManager(conf);
  aclManager.setTimelineStore(summaryStore);
  summaryTdm = new TimelineDataManager(summaryStore, aclManager);
  summaryTdm.init(conf);
  addService(summaryTdm);
  // start child services that aren't already started
  super.serviceStart();
  if (!fs.exists(activeRootPath)) {
    fs.mkdirs(activeRootPath);
    fs.setPermission(activeRootPath, ACTIVE_DIR_PERMISSION);
  }
  if (!fs.exists(doneRootPath)) {
    fs.mkdirs(doneRootPath);
    fs.setPermission(doneRootPath, DONE_DIR_PERMISSION);
  }
  objMapper = new ObjectMapper();
  objMapper.setAnnotationIntrospector(
      new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
  jsonFactory = new MappingJsonFactory(objMapper);
  final long scanIntervalSecs = conf.getLong(
      YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_SCAN_INTERVAL_SECONDS,
      YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_SCAN_INTERVAL_SECONDS_DEFAULT);
  final long cleanerIntervalSecs = conf.getLong(
      YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_CLEANER_INTERVAL_SECONDS,
      YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_CLEANER_INTERVAL_SECONDS_DEFAULT);
  final int numThreads = conf.getInt(
      YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_THREADS,
      YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_THREADS_DEFAULT);
  LOG.info("Scanning active directory {} every {} seconds", activeRootPath, scanIntervalSecs);
  LOG.info("Cleaning logs every {} seconds", cleanerIntervalSecs);
  executor = new ScheduledThreadPoolExecutor(numThreads,
      new ThreadFactoryBuilder().setNameFormat("EntityLogPluginWorker #%d").build());
  executor.scheduleAtFixedRate(new EntityLogScanner(), 0, scanIntervalSecs,
      TimeUnit.SECONDS);
  executor.scheduleAtFixedRate(new EntityLogCleaner(), cleanerIntervalSecs,
      cleanerIntervalSecs, TimeUnit.SECONDS);
}
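Stripped of the filesystem and scanner setup, the method boils down to a small wiring pattern: build a TimelineACLsManager from the configuration, point it at the backing store, and hand both to a TimelineDataManager. A minimal sketch of that pattern, with MemoryTimelineStore assumed purely for illustration (any TimelineStore implementation can take its place):

// Backing store; MemoryTimelineStore is an assumption to keep the sketch self-contained.
Configuration conf = new YarnConfiguration();
TimelineStore store = new MemoryTimelineStore();
store.init(conf);
store.start();
// ACLs manager guards timeline access; setTimelineStore mirrors the service code above.
TimelineACLsManager aclManager = new TimelineACLsManager(conf);
aclManager.setTimelineStore(store);
// Data manager fronts the store with ACL enforcement; it is a YARN service.
TimelineDataManager tdm = new TimelineDataManager(store, aclManager);
tdm.init(conf);
tdm.start();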
In the class TestApplicationHistoryManagerOnTimelineStore, the setup method:
@Before
public void setup() throws Exception {
  // Only test the ACLs of the generic history
  TimelineACLsManager aclsManager = new TimelineACLsManager(new YarnConfiguration());
  aclsManager.setTimelineStore(store);
  TimelineDataManager dataManager = new TimelineDataManager(store, aclsManager);
  dataManager.init(conf);
  ApplicationACLsManager appAclsManager = new ApplicationACLsManager(conf);
  historyManager =
      new ApplicationHistoryManagerOnTimelineStore(dataManager, appAclsManager);
  historyManager.init(conf);
  historyManager.start();
}
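The ACLs manager here is constructed from a fresh YarnConfiguration rather than the test's conf, presumably so that timeline-level ACLs stay at their defaults while the application-level ACLs under test come from conf through ApplicationACLsManager. The enforcement behaviour of TimelineACLsManager itself hangs on two YARN settings that the later examples flip; a short sketch of those knobs (they are read when the manager is constructed):

// yarn.acl.enable and yarn.admin.acl drive TimelineACLsManager's access checks;
// the values below match what the later test setups use.
Configuration conf = new YarnConfiguration();
conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
conf.set(YarnConfiguration.YARN_ADMIN_ACL, "foo");
TimelineACLsManager aclsManager = new TimelineACLsManager(conf);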
In the class TestAHSWebServices, the setupClass method:
@BeforeClass
public static void setupClass() throws Exception {
  conf = new YarnConfiguration();
  TimelineStore store =
      TestApplicationHistoryManagerOnTimelineStore.createStore(MAX_APPS);
  TimelineACLsManager aclsManager = new TimelineACLsManager(conf);
  aclsManager.setTimelineStore(store);
  TimelineDataManager dataManager = new TimelineDataManager(store, aclsManager);
  conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
  conf.set(YarnConfiguration.YARN_ADMIN_ACL, "foo");
  conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true);
  conf.set(YarnConfiguration.NM_REMOTE_APP_LOG_DIR, remoteLogRootDir);
  dataManager.init(conf);
  ApplicationACLsManager appAclsManager = new ApplicationACLsManager(conf);
  ApplicationHistoryManagerOnTimelineStore historyManager =
      new ApplicationHistoryManagerOnTimelineStore(dataManager, appAclsManager);
  historyManager.init(conf);
  historyClientService = new ApplicationHistoryClientService(historyManager) {
    @Override
    protected void serviceStart() throws Exception {
      // Do Nothing
    }
  };
  historyClientService.init(conf);
  historyClientService.start();
  ahsWebservice = new AHSWebServices(historyClientService, conf) {
    @Override
    public String getNMWebAddressFromRM(Configuration configuration, String nodeId)
        throws ClientHandlerException, UniformInterfaceException, JSONException {
      if (nodeId.equals(NM_ID)) {
        return NM_WEBADDRESS;
      }
      return null;
    }
  };
  fs = FileSystem.get(conf);
  GuiceServletConfig.setInjector(Guice.createInjector(new WebServletModule()));
}
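Two details stand out: the anonymous subclasses stub serviceStart and getNMWebAddressFromRM so the test never contacts a real ResourceManager or NodeManager, and yarn.admin.acl is set to "foo", making that user an administrator for the ACL checks the test exercises. A hedged sketch of what the admin ACL means in practice; the isAdmin(UserGroupInformation) call on AdminACLsManager is assumed from how YARN services use it, so verify the exact API against your Hadoop version:

// Illustration only: with yarn.admin.acl = "foo" (set above), a caller identified
// as "foo" is expected to pass the admin check while other users are not.
AdminACLsManager adminAcls = new AdminACLsManager(conf);
UserGroupInformation foo = UserGroupInformation.createRemoteUser("foo");
UserGroupInformation bar = UserGroupInformation.createRemoteUser("bar");
boolean fooIsAdmin = adminAcls.isAdmin(foo);  // expected: true
boolean barIsAdmin = adminAcls.isAdmin(bar);  // expected: false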
In the class TestTimelineDataManager, the setup method:
@Before
public void setup() throws Exception {
  fsPath = new File("target", this.getClass().getSimpleName() + "-tmpDir").getAbsoluteFile();
  fsContext = FileContext.getLocalFSFileContext();
  fsContext.delete(new Path(fsPath.getAbsolutePath()), true);
  Configuration conf = new YarnConfiguration();
  conf.set(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH, fsPath.getAbsolutePath());
  conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_TTL_ENABLE, false);
  store = new LeveldbTimelineStore();
  store.init(conf);
  store.start();
  loadTestEntityData();
  loadVerificationEntityData();
  loadTestDomainData();
  conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, false);
  aclsManager = new TimelineACLsManager(conf);
  aclsManager.setTimelineStore(store);
  dataManaer = new TimelineDataManager(store, aclsManager);
  conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
  conf.set(YarnConfiguration.YARN_ADMIN_ACL, "admin");
  dataManaer.init(conf);
  adminACLsManager = new AdminACLsManager(conf);
}
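The setup flips yarn.acl.enable off before constructing the TimelineACLsManager and back on, with "admin" as the admin ACL, before initializing the data manager and building the test's own AdminACLsManager, so the two managers see different ACL settings. It also leaves a running LeveldbTimelineStore and an on-disk directory behind; a minimal matching teardown, not shown in the excerpt, might look like this:

@After
public void tearDown() throws Exception {
  if (store != null) {
    store.stop();  // LeveldbTimelineStore is a YARN service; stopping it releases its resources
  }
  if (fsContext != null) {
    // Remove the temporary leveldb directory created in setup()
    fsContext.delete(new Path(fsPath.getAbsolutePath()), true);
  }
}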
In the class PluginStoreTestUtils, the getTdmWithStore method:
static TimelineDataManager getTdmWithStore(Configuration config, TimelineStore store) {
  TimelineACLsManager aclManager = new TimelineACLsManager(config);
  TimelineDataManager tdm = new TimelineDataManager(store, aclManager);
  tdm.init(config);
  return tdm;
}
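Unlike the earlier setups, this helper never calls setTimelineStore on the new TimelineACLsManager, so a test that relies on domain-based access checks would have to attach a store to the manager separately. A hedged usage sketch of the helper, again assuming MemoryTimelineStore purely for illustration:

// Illustrative usage: create and start a store, then let the helper wire the data manager.
TimelineStore store = new MemoryTimelineStore();
store.init(new YarnConfiguration());
store.start();
TimelineDataManager tdm = PluginStoreTestUtils.getTdmWithStore(new YarnConfiguration(), store);
tdm.start();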