Use of java.util.concurrent.ConcurrentHashMap in project mapdb by jankotek: class ConcurrentHashMap8Test, method testMerge2.
/**
 * merge replaces when the given key is present
 */
public void testMerge2() {
    ConcurrentHashMap map = map5();
    assertEquals("Z", map.merge(one, "Y", (x, y) -> "Z"));
}
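For context, ConcurrentHashMap.merge applies the remapping function to the existing value and the supplied value when the key is already mapped; the result replaces the mapping and is also returned. When the key is absent, the supplied value is stored and the function is not called. The following is a minimal standalone sketch of that contract, not code from the mapdb test suite (the class name MergeSketch and the literal keys are illustrative):

import java.util.concurrent.ConcurrentHashMap;

public class MergeSketch {
    public static void main(String[] args) {
        ConcurrentHashMap<Integer, String> map = new ConcurrentHashMap<>();
        map.put(1, "A");

        // Key present: the function receives (oldValue, newValue) = ("A", "Y"),
        // its result "Z" replaces the mapping, and merge returns "Z".
        String merged = map.merge(1, "Y", (oldV, newV) -> "Z");
        System.out.println(merged);      // Z
        System.out.println(map.get(1));  // Z

        // Key absent: the function is not called; the given value is stored and returned.
        String added = map.merge(2, "Y", (oldV, newV) -> "Z");
        System.out.println(added);       // Y
    }
}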
Use of java.util.concurrent.ConcurrentHashMap in project mapdb by jankotek: class ConcurrentHashMap8Test, method testCompute2.
/**
 * compute adds when the given key is not present
 */
public void testCompute2() {
    ConcurrentHashMap map = map5();
    assertEquals("Z", map.compute(six, (x, y) -> "Z"));
}
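ConcurrentHashMap.compute, by contrast, always invokes the remapping function with the key and the current value (null when the key is absent); the function's result is stored and returned, and a null result removes the entry. A minimal standalone sketch of that behavior follows (ComputeSketch and its keys are illustrative, not part of ConcurrentHashMap8Test):

import java.util.concurrent.ConcurrentHashMap;

public class ComputeSketch {
    public static void main(String[] args) {
        ConcurrentHashMap<Integer, String> map = new ConcurrentHashMap<>();
        map.put(1, "A");

        // Key absent: the function is called with (key, null); its result is inserted.
        String added = map.compute(6, (k, v) -> "Z");
        System.out.println(added);       // Z
        System.out.println(map.get(6));  // Z

        // Key present: the function sees the current value and its result replaces it.
        String replaced = map.compute(1, (k, v) -> v + "!");
        System.out.println(replaced);    // A!

        // Returning null removes the entry.
        map.compute(1, (k, v) -> null);
        System.out.println(map.containsKey(1)); // false
    }
}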
Use of java.util.concurrent.ConcurrentHashMap in project mapdb by jankotek: class ConcurrentHashMap8Test, method map5.
/**
 * Returns a new map from Integers 1-5 to Strings "A"-"E".
 */
private static ConcurrentHashMap map5() {
    ConcurrentHashMap map = new ConcurrentHashMap(5);
    assertTrue(map.isEmpty());
    map.put(one, "A");
    map.put(two, "B");
    map.put(three, "C");
    map.put(four, "D");
    map.put(five, "E");
    assertFalse(map.isEmpty());
    assertEquals(5, map.size());
    return map;
}
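The constants one through five used above are boxed Integer fixtures inherited from the shared JSR166 test base class. A self-contained approximation of the fixture, with those constants declared locally so the sketch compiles on its own, might look like this (illustrative only, not the project's code):

import java.util.concurrent.ConcurrentHashMap;

public class Map5Sketch {
    // In ConcurrentHashMap8Test these are inherited from the JSR166 test base class;
    // they are declared here only to keep the sketch self-contained.
    static final Integer one = 1, two = 2, three = 3, four = 4, five = 5;

    static ConcurrentHashMap<Integer, String> map5() {
        ConcurrentHashMap<Integer, String> map = new ConcurrentHashMap<>(5);
        map.put(one, "A");
        map.put(two, "B");
        map.put(three, "C");
        map.put(four, "D");
        map.put(five, "E");
        return map;
    }

    public static void main(String[] args) {
        // Prints the five entries 1=A through 5=E (iteration order is not guaranteed).
        System.out.println(map5());
    }
}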
Use of java.util.concurrent.ConcurrentHashMap in project hadoop by apache: class TestContainerLogsPage, method testContainerLogPageAccess.
@Test(timeout = 10000)
public void testContainerLogPageAccess() throws IOException {
    // SecureIOUtils require Native IO to be enabled. This test will run
    // only if it is enabled.
    assumeTrue(NativeIO.isAvailable());
    String user = "randomUser" + System.currentTimeMillis();
    File absLogDir = null, appDir = null, containerDir = null, syslog = null;
    try {
        // target log directory
        absLogDir = new File("target",
            TestContainerLogsPage.class.getSimpleName() + "LogDir").getAbsoluteFile();
        absLogDir.mkdir();
        Configuration conf = new Configuration();
        conf.set(YarnConfiguration.NM_LOG_DIRS, absLogDir.toURI().toString());
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
        UserGroupInformation.setConfiguration(conf);
        NodeHealthCheckerService healthChecker = createNodeHealthCheckerService(conf);
        healthChecker.init(conf);
        LocalDirsHandlerService dirsHandler = healthChecker.getDiskHandler();
        // Add an application and the corresponding containers
        RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(conf);
        long clusterTimeStamp = 1234;
        ApplicationId appId = BuilderUtils.newApplicationId(recordFactory, clusterTimeStamp, 1);
        Application app = mock(Application.class);
        when(app.getAppId()).thenReturn(appId);
        // Making sure that application returns a random user. This is required
        // for SecureIOUtils' file owner check.
        when(app.getUser()).thenReturn(user);
        ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId(appId, 1);
        ContainerId container1 = BuilderUtils.newContainerId(recordFactory, appId, appAttemptId, 0);
        // Testing secure read access for log files
        // Creating application and container directory and syslog file.
        appDir = new File(absLogDir, appId.toString());
        appDir.mkdir();
        containerDir = new File(appDir, container1.toString());
        containerDir.mkdir();
        syslog = new File(containerDir, "syslog");
        syslog.createNewFile();
        BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(syslog));
        out.write("Log file Content".getBytes());
        out.close();
        Context context = mock(Context.class);
        ConcurrentMap<ApplicationId, Application> appMap =
            new ConcurrentHashMap<ApplicationId, Application>();
        appMap.put(appId, app);
        when(context.getApplications()).thenReturn(appMap);
        ConcurrentHashMap<ContainerId, Container> containers =
            new ConcurrentHashMap<ContainerId, Container>();
        when(context.getContainers()).thenReturn(containers);
        when(context.getLocalDirsHandler()).thenReturn(dirsHandler);
        MockContainer container =
            new MockContainer(appAttemptId, new AsyncDispatcher(), conf, user, appId, 1);
        container.setState(ContainerState.RUNNING);
        context.getContainers().put(container1, container);
        ContainersLogsBlock cLogsBlock = new ContainersLogsBlock(context);
        Map<String, String> params = new HashMap<String, String>();
        params.put(YarnWebParams.CONTAINER_ID, container1.toString());
        params.put(YarnWebParams.CONTAINER_LOG_TYPE, "syslog");
        Injector injector = WebAppTests.testPage(ContainerLogsPage.class,
            ContainersLogsBlock.class, cLogsBlock, params, (Module[]) null);
        PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
        verify(spyPw).write("Exception reading log file. Application submitted by '"
            + user + "' doesn't own requested log file : syslog");
    } finally {
        if (syslog != null) {
            syslog.delete();
        }
        if (containerDir != null) {
            containerDir.delete();
        }
        if (appDir != null) {
            appDir.delete();
        }
        if (absLogDir != null) {
            absLogDir.delete();
        }
    }
}
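The ConcurrentHashMap usage in this test follows a common pattern: real concurrent maps back a Mockito-mocked Context, so the code under test reads applications and containers through stubbed getters while the test registers entries directly in the maps. The sketch below reduces that wiring to hypothetical stand-in interfaces (AppId, App, and Context here are placeholders, not the YARN types) and assumes Mockito is on the classpath:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class MockedContextSketch {

    // Hypothetical stand-ins for the YARN ApplicationId/Application/Context types.
    interface AppId {}
    interface App {}
    interface Context {
        ConcurrentMap<AppId, App> getApplications();
    }

    public static void main(String[] args) {
        // A real concurrent map backs the mocked context, so entries added here
        // are visible to anything that calls the stubbed getter.
        ConcurrentMap<AppId, App> apps = new ConcurrentHashMap<>();

        Context context = mock(Context.class);
        when(context.getApplications()).thenReturn(apps);

        AppId id = mock(AppId.class);
        App app = mock(App.class);
        context.getApplications().put(id, app);

        System.out.println(context.getApplications().size()); // 1
    }
}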
Use of java.util.concurrent.ConcurrentHashMap in project hadoop by apache: class TestLogAggregationService, method createLogAggregationService.
private LogAggregationService createLogAggregationService(ApplicationId appId,
        String className, String parameters, boolean createLogAggContext) {
    ConcurrentHashMap<ContainerId, Container> containers =
        new ConcurrentHashMap<ContainerId, Container>();
    LogAggregationService logAggregationService = new LogAggregationService(dispatcher,
        this.context, this.delSrvc, super.dirsHandler);
    logAggregationService.init(this.conf);
    logAggregationService.start();
    LogAggregationContext logAggContext = null;
    if (createLogAggContext) {
        logAggContext = Records.newRecord(LogAggregationContext.class);
        logAggContext.setLogAggregationPolicyClassName(className);
        if (parameters != null) {
            logAggContext.setLogAggregationPolicyParameters(parameters);
        }
    }
    logAggregationService.handle(new LogHandlerAppStartedEvent(appId, this.user, null,
        this.acls, logAggContext));
    return logAggregationService;
}
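The helper starts the service and announces the application through an app-started event; a service of this kind typically tracks one aggregator per application in a concurrent map keyed by the application id. The sketch below is a generic, self-contained illustration of that registry idiom, not the LogAggregationService implementation (AppId and AppWorker are hypothetical stand-ins for the Hadoop types):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class PerAppRegistrySketch {

    // Hypothetical stand-ins for an application id and a per-application worker.
    record AppId(long clusterTimestamp, int id) {}
    static class AppWorker {
        final AppId appId;
        AppWorker(AppId appId) { this.appId = appId; }
    }

    private final ConcurrentMap<AppId, AppWorker> workers = new ConcurrentHashMap<>();

    // Registers a worker for the application exactly once, even if the
    // "app started" event is delivered concurrently from several threads.
    AppWorker appStarted(AppId appId) {
        return workers.computeIfAbsent(appId, AppWorker::new);
    }

    public static void main(String[] args) {
        PerAppRegistrySketch registry = new PerAppRegistrySketch();
        AppId appId = new AppId(1234L, 1);
        // The second call returns the same worker instead of creating a new one.
        System.out.println(registry.appStarted(appId) == registry.appStarted(appId)); // true
    }
}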