use of org.apache.commons.collections.map.HashedMap in project topcom-cloud by 545314690.
the class UserController method getTreeOn.
private List<Resource> getTreeOn(List<Resource> resourceList) {
    Map<Long, Resource> resultMap = new HashedMap();
    for (int i = 0; i < resourceList.size(); i++) {
        Resource resource_i = resourceList.get(i);
        Long parentId_i = getResourceParentId(resource_i);
        if (resultMap.keySet().contains(parentId_i)) {
            continue;
        }
        Resource superParent_i = getResourceParent(resource_i);
        for (int j = i + 1; j < resourceList.size(); j++) {
            Resource resource_j = resourceList.get(j);
            superParent_i = megerBtoA(superParent_i, resource_j);
        }
        if (!resultMap.keySet().contains(parentId_i)) {
            resultMap.put(parentId_i, superParent_i);
        }
    }
    resourceList.clear();
    for (Long l : resultMap.keySet()) {
        resourceList.add(resultMap.get(l));
    }
    return resourceList;
}
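The method above uses HashedMap as a raw type, which is how it exists in Commons Collections 3.x (the later commons-collections4 line offers a generic HashedMap<K, V> under org.apache.commons.collections4.map). As a point of comparison, here is a minimal, generic group-by-key sketch built on the JDK collections only; it shows the same "keep one merged entry per key, then rebuild the list" shape, but it is not a drop-in replacement for getTreeOn, whose merge semantics (getResourceParent, megerBtoA) remain project-specific:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.BinaryOperator;
import java.util.function.Function;

// Keep one merged value per key, preserving first-seen key order, then rebuild the list.
static <K, V> List<V> mergeByKey(List<V> items, Function<V, K> keyFn, BinaryOperator<V> merger) {
    Map<K, V> byKey = new LinkedHashMap<>();
    for (V item : items) {
        byKey.merge(keyFn.apply(item), item, merger);
    }
    return new ArrayList<>(byKey.values());
}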
use of org.apache.commons.collections.map.HashedMap in project cas by apereo.
the class InternalGroovyScriptDao method getAttributesForUser.
@Override
public Map<String, Object> getAttributesForUser(final String uid) {
    final Map<String, Object> finalAttributes = new HashedMap();
    casProperties.getAuthn().getAttributeRepository().getGroovy().forEach(groovy -> {
        final ClassLoader parent = getClass().getClassLoader();
        try (GroovyClassLoader loader = new GroovyClassLoader(parent)) {
            if (groovy.getConfig().getLocation() != null) {
                final File groovyFile = groovy.getConfig().getLocation().getFile();
                if (groovyFile.exists()) {
                    final Class<?> groovyClass = loader.parseClass(groovyFile);
                    LOGGER.debug("Loaded groovy class [{}] from script [{}]", groovyClass.getSimpleName(), groovyFile.getCanonicalPath());
                    final GroovyObject groovyObject = (GroovyObject) groovyClass.newInstance();
                    LOGGER.debug("Created groovy object instance from class [{}]", groovyFile.getCanonicalPath());
                    final Object[] args = { uid, LOGGER, casProperties, applicationContext };
                    LOGGER.debug("Executing groovy script's run method, with parameters [{}]", args);
                    final Map<String, Object> personAttributesMap = (Map<String, Object>) groovyObject.invokeMethod("run", args);
                    LOGGER.debug("Creating person attributes with the username [{}] and attributes [{}]", uid, personAttributesMap);
                    finalAttributes.putAll(personAttributesMap);
                }
            }
        } catch (final Exception e) {
            LOGGER.error(e.getMessage(), e);
        }
    });
    return finalAttributes;
}
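Each configured Groovy script is expected to expose a run method that receives [uid, LOGGER, casProperties, applicationContext] and returns a Map<String, Object>; the results of all scripts are folded into finalAttributes with putAll, so when two scripts emit the same attribute key the later script wins. A minimal standalone sketch of that last-write-wins merge (the keys and values are made up for illustration):

import java.util.HashMap;
import java.util.Map;

public class MergeDemo {
    public static void main(String[] args) {
        Map<String, Object> finalAttributes = new HashMap<>();
        finalAttributes.putAll(Map.of("mail", "jane@example.org", "displayName", "From script 1"));
        finalAttributes.putAll(Map.of("displayName", "From script 2")); // same key: later putAll overwrites
        System.out.println(finalAttributes.get("displayName"));         // prints "From script 2"
    }
}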
use of org.apache.commons.collections.map.HashedMap in project atlas by alibaba.
the class DataBindingRenameTask method createAwbPackages.
/**
 * The directory for generating the .so files.
 */
@TaskAction
void createAwbPackages() throws ExecutionException, InterruptedException {
    AndroidDependencyTree androidDependencyTree = AtlasBuildContext.androidDependencyTrees.get(getVariantName());
    if (null == androidDependencyTree) {
        return;
    }
    ExecutorServicesHelper executorServicesHelper = new ExecutorServicesHelper(taskName, getLogger(), 0);
    List<Runnable> runnables = new ArrayList<>();
    for (final AwbBundle awbBundle : androidDependencyTree.getAwbBundles()) {
        runnables.add(new Runnable() {

            @Override
            public void run() {
                try {
                    File dataBindingClazzFolder = appVariantOutputContext.getVariantContext().getJAwbavaOutputDir(awbBundle);
                    String packageName = ManifestFileUtils.getPackage(awbBundle.getOrgManifestFile());
                    String appName = appVariantContext.getVariantConfiguration().getOriginalApplicationId();
                    // Delete the classes that already exist.
                    File dataMapperClazz = new File(dataBindingClazzFolder, "android/databinding/DataBinderMapper.class");
                    if (!dataMapperClazz.exists()) {
                        throw new GradleException("missing datamapper class");
                    }
                    FileInputStream fileInputStream = new FileInputStream(dataMapperClazz);
                    ClassReader in1 = new ClassReader(fileInputStream);
                    ClassWriter cw = new ClassWriter(0);
                    Map<String, String> reMapping = new HashedMap();
                    reMapping.put(appName.replace(".", "/") + "/BR", packageName.replace(".", "/") + "/BR");
                    RemappingClassAdapter remappingClassAdapter = new RemappingClassAdapter(cw, new SimpleRemapper(reMapping));
                    in1.accept(remappingClassAdapter, 8);
                    ClassReader in2 = new ClassReader(cw.toByteArray());
                    ClassWriter cw2 = new ClassWriter(0);
                    Set<String> renames = new HashSet<String>();
                    renames.add("android/databinding/DataBinderMapper");
                    in2.accept(new ClassRenamer(cw2, renames, packageName.replace(".", "/") + "/DataBinderMapper"), 8);
                    File destClass = new File(dataBindingClazzFolder, packageName.replace(".", "/") + "/DataBinderMapper.class");
                    destClass.getParentFile().mkdirs();
                    FileOutputStream fileOutputStream = new FileOutputStream(destClass);
                    fileOutputStream.write(cw2.toByteArray());
                    IOUtils.closeQuietly(fileOutputStream);
                    IOUtils.closeQuietly(fileInputStream);
                    FileUtils.deleteDirectory(new File(dataBindingClazzFolder, "android/databinding"));
                    FileUtils.deleteDirectory(new File(dataBindingClazzFolder, "com/android/databinding"));
                    File appDir = new File(dataBindingClazzFolder, appName.replace(".", "/"));
                    if (appDir.exists()) {
                        File[] files = appDir.listFiles(new FileFilter() {

                            @Override
                            public boolean accept(File pathname) {
                                return pathname.isFile() && !pathname.isDirectory();
                            }
                        });
                        for (File tmp : files) {
                            FileUtils.forceDelete(tmp);
                        }
                    }
                } catch (Throwable e) {
                    e.printStackTrace();
                    throw new GradleException("package awb failed");
                }
            }
        });
    }
    executorServicesHelper.execute(runnables);
}
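The keys and values in reMapping are JVM internal names (package separators replaced by '/'), which is why both sides call replace(".", "/"); the literal 8 passed to accept is ClassReader.EXPAND_FRAMES. RemappingClassAdapter was deprecated in later ASM releases in favour of ClassRemapper, so a minimal sketch of the same BR rename with the newer API looks roughly like this (the com/example/... names are placeholders, not the ones Atlas computes from the manifest):

import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.commons.ClassRemapper;
import org.objectweb.asm.commons.SimpleRemapper;

import java.util.HashMap;
import java.util.Map;

// Rewrites every reference to com/example/app/BR so it points at com/example/bundle/BR instead.
static byte[] renameBr(byte[] originalClass) {
    Map<String, String> mapping = new HashMap<>();
    mapping.put("com/example/app/BR", "com/example/bundle/BR"); // internal names, '/'-separated
    ClassReader reader = new ClassReader(originalClass);
    ClassWriter writer = new ClassWriter(0);
    reader.accept(new ClassRemapper(writer, new SimpleRemapper(mapping)), ClassReader.EXPAND_FRAMES);
    return writer.toByteArray();
}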
use of org.apache.commons.collections.map.HashedMap in project flink by apache.
the class HeapKeyedStateBackend method snapshot.
@Override
@SuppressWarnings("unchecked")
public RunnableFuture<KeyGroupsStateHandle> snapshot(final long checkpointId, final long timestamp, final CheckpointStreamFactory streamFactory, CheckpointOptions checkpointOptions) throws Exception {
    if (!hasRegisteredState()) {
        return DoneFuture.nullValue();
    }
    long syncStartTime = System.currentTimeMillis();
    Preconditions.checkState(stateTables.size() <= Short.MAX_VALUE, "Too many KV-States: " + stateTables.size() + ". Currently at most " + Short.MAX_VALUE + " states are supported");
    List<KeyedBackendSerializationProxy.StateMetaInfo<?, ?>> metaInfoProxyList = new ArrayList<>(stateTables.size());
    final Map<String, Integer> kVStateToId = new HashMap<>(stateTables.size());
    final Map<StateTable<K, ?, ?>, StateTableSnapshot> cowStateStableSnapshots = new HashedMap(stateTables.size());
    for (Map.Entry<String, StateTable<K, ?, ?>> kvState : stateTables.entrySet()) {
        RegisteredBackendStateMetaInfo<?, ?> metaInfo = kvState.getValue().getMetaInfo();
        KeyedBackendSerializationProxy.StateMetaInfo<?, ?> metaInfoProxy = new KeyedBackendSerializationProxy.StateMetaInfo(metaInfo.getStateType(), metaInfo.getName(), metaInfo.getNamespaceSerializer(), metaInfo.getStateSerializer());
        metaInfoProxyList.add(metaInfoProxy);
        kVStateToId.put(kvState.getKey(), kVStateToId.size());
        StateTable<K, ?, ?> stateTable = kvState.getValue();
        if (null != stateTable) {
            cowStateStableSnapshots.put(stateTable, stateTable.createSnapshot());
        }
    }
    final KeyedBackendSerializationProxy serializationProxy = new KeyedBackendSerializationProxy(keySerializer, metaInfoProxyList);
    // --------------------------------------------------- this becomes the end of sync part
    // implementation of the async IO operation, based on FutureTask
    final AbstractAsyncIOCallable<KeyGroupsStateHandle, CheckpointStreamFactory.CheckpointStateOutputStream> ioCallable = new AbstractAsyncIOCallable<KeyGroupsStateHandle, CheckpointStreamFactory.CheckpointStateOutputStream>() {

        AtomicBoolean open = new AtomicBoolean(false);

        @Override
        public CheckpointStreamFactory.CheckpointStateOutputStream openIOHandle() throws Exception {
            if (open.compareAndSet(false, true)) {
                CheckpointStreamFactory.CheckpointStateOutputStream stream = streamFactory.createCheckpointStateOutputStream(checkpointId, timestamp);
                try {
                    cancelStreamRegistry.registerClosable(stream);
                    return stream;
                } catch (Exception ex) {
                    open.set(false);
                    throw ex;
                }
            } else {
                throw new IOException("Operation already opened.");
            }
        }

        @Override
        public KeyGroupsStateHandle performOperation() throws Exception {
            long asyncStartTime = System.currentTimeMillis();
            CheckpointStreamFactory.CheckpointStateOutputStream stream = getIoHandle();
            DataOutputViewStreamWrapper outView = new DataOutputViewStreamWrapper(stream);
            serializationProxy.write(outView);
            long[] keyGroupRangeOffsets = new long[keyGroupRange.getNumberOfKeyGroups()];
            for (int keyGroupPos = 0; keyGroupPos < keyGroupRange.getNumberOfKeyGroups(); ++keyGroupPos) {
                int keyGroupId = keyGroupRange.getKeyGroupId(keyGroupPos);
                keyGroupRangeOffsets[keyGroupPos] = stream.getPos();
                outView.writeInt(keyGroupId);
                for (Map.Entry<String, StateTable<K, ?, ?>> kvState : stateTables.entrySet()) {
                    outView.writeShort(kVStateToId.get(kvState.getKey()));
                    cowStateStableSnapshots.get(kvState.getValue()).writeMappingsInKeyGroup(outView, keyGroupId);
                }
            }
            if (open.compareAndSet(true, false)) {
                StreamStateHandle streamStateHandle = stream.closeAndGetHandle();
                KeyGroupRangeOffsets offsets = new KeyGroupRangeOffsets(keyGroupRange, keyGroupRangeOffsets);
                final KeyGroupsStateHandle keyGroupsStateHandle = new KeyGroupsStateHandle(offsets, streamStateHandle);
                if (asynchronousSnapshots) {
                    LOG.info("Heap backend snapshot ({}, asynchronous part) in thread {} took {} ms.", streamFactory, Thread.currentThread(), (System.currentTimeMillis() - asyncStartTime));
                }
                return keyGroupsStateHandle;
            } else {
                throw new IOException("Checkpoint stream already closed.");
            }
        }

        @Override
        public void done(boolean canceled) {
            if (open.compareAndSet(true, false)) {
                CheckpointStreamFactory.CheckpointStateOutputStream stream = getIoHandle();
                if (null != stream) {
                    cancelStreamRegistry.unregisterClosable(stream);
                    IOUtils.closeQuietly(stream);
                }
            }
            for (StateTableSnapshot snapshot : cowStateStableSnapshots.values()) {
                snapshot.release();
            }
        }
    };
    AsyncStoppableTaskWithCallback<KeyGroupsStateHandle> task = AsyncStoppableTaskWithCallback.from(ioCallable);
    if (!asynchronousSnapshots) {
        task.run();
    }
    LOG.info("Heap backend snapshot (" + streamFactory + ", synchronous part) in thread " + Thread.currentThread() + " took " + (System.currentTimeMillis() - syncStartTime) + " ms.");
    return task;
}
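Two details of the synchronous part are worth calling out. The raw HashedMap(stateTables.size()) call uses the Commons Collections constructor that takes an initial capacity, and kVStateToId.put(kvState.getKey(), kVStateToId.size()) hands each state name the next dense integer id in iteration order, which is why the Preconditions check caps the number of states at Short.MAX_VALUE and why performOperation can later record the id with writeShort. A minimal standalone sketch of that id-assignment idiom (names are illustrative, not Flink API):

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Assign 0, 1, 2, ... to names in first-seen order; a duplicate name keeps its first id.
static Map<String, Integer> denseIds(List<String> stateNames) {
    Map<String, Integer> ids = new LinkedHashMap<>();
    for (String name : stateNames) {
        ids.putIfAbsent(name, ids.size());
    }
    return ids;
}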
use of org.apache.commons.collections.map.HashedMap in project Gargoyle by callakrsos.
the class CachedMapTest method miltiThread.
@Test
public void miltiThread() throws InterruptedException {
    for (int i = 0; i < 10; i++) {
        final int inn = i;
        Thread thread = new Thread(() -> {
            String name = Thread.currentThread().getName();
            CachedMap<Object, Object> cachedMap = new CachedMap<>(1000);
            cachedMap.put("sample", "zz" + inn);
            cachedMap.put("sample22", "zz" + inn);
            System.out.println(name + " " + cachedMap.get("sample"));
            try {
                Thread.sleep(1500);
            } catch (Exception e1) {
                // TODO Auto-generated catch block
                e1.printStackTrace();
            }
            System.out.println(name + " " + cachedMap.get("sample"));
            System.out.println(name + " " + cachedMap.get("sample22"));
            HashedMap hashedMap = new HashedMap();
            hashedMap.put("sample", "zz" + inn);
            hashedMap.put("sample22", "zz" + inn);
            cachedMap.putAll(hashedMap);
            System.out.println(name + " " + cachedMap);
            try {
                Thread.sleep(1500);
            } catch (Exception e) {
                e.printStackTrace();
            }
            System.out.println(name + " " + cachedMap);
        }, "Name" + i);
        thread.start();
    }
    Thread.currentThread().sleep(5000);
}
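CachedMap is Gargoyle's own class, so the constructor argument is presumably an expiry interval in milliseconds: the get issued before the 1500 ms sleep should return the stored value, while the ones after it should come back empty until putAll repopulates the map. A toy sketch of that kind of evict-on-read behaviour follows; it is a simplification under that assumption, not the real CachedMap implementation:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Toy expiring map: each entry remembers when it was written and is dropped on read once older than ttlMillis.
class ExpiringMap<K, V> {
    private final long ttlMillis;
    private final Map<K, V> values = new ConcurrentHashMap<>();
    private final Map<K, Long> writtenAt = new ConcurrentHashMap<>();

    ExpiringMap(long ttlMillis) {
        this.ttlMillis = ttlMillis;
    }

    void put(K key, V value) {
        values.put(key, value);
        writtenAt.put(key, System.currentTimeMillis());
    }

    V get(K key) {
        Long ts = writtenAt.get(key);
        if (ts == null || System.currentTimeMillis() - ts > ttlMillis) {
            values.remove(key);      // expired (or never present): evict and report a miss
            writtenAt.remove(key);
            return null;
        }
        return values.get(key);
    }
}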