use of java.util.concurrent.CopyOnWriteArrayList in project Rajawali by Rajawali.
the class Octree method init.
/*
 * (non-Javadoc)
 * @see rajawali.scenegraph.AD_AABBTree#init()
 */
@Override
protected void init() {
    // Pre-allocate storage here to favor modification speed
    CHILD_COUNT = 8;
    mChildren = new Octree[CHILD_COUNT];
    mMembers = Collections.synchronizedList(new CopyOnWriteArrayList<IGraphNodeMember>());
    // mOutside should not be used for children, thus we want to force the Null pointer.
    if (mParent == null)
        mOutside = Collections.synchronizedList(new CopyOnWriteArrayList<IGraphNodeMember>());
    mChildLengths = new Vector3();
}
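Below is a minimal, self-contained sketch of the same storage idiom; the SceneMember type and class name are illustrative placeholders rather than Rajawali types. It shows why the snapshot iterators of CopyOnWriteArrayList make the member list safe to traverse while another thread mutates it.

import java.util.Collections;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

// Sketch only: CopyOnWriteArrayList already makes individual reads and writes
// thread-safe; the synchronizedList wrapper adds a common lock callers can
// also synchronize on for compound operations.
public class NodeStorageSketch {
    static class SceneMember { }

    private final List<SceneMember> members =
            Collections.synchronizedList(new CopyOnWriteArrayList<SceneMember>());

    void add(SceneMember m) {
        members.add(m); // copy-on-write: each write copies the backing array
    }

    void drawAll() {
        // Iteration works on a snapshot, so no ConcurrentModificationException
        // even if another thread adds or removes members mid-loop.
        for (SceneMember m : members) {
            // render m ...
        }
    }

    public static void main(String[] args) {
        NodeStorageSketch s = new NodeStorageSketch();
        s.add(new SceneMember());
        s.drawAll();
    }
}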
use of java.util.concurrent.CopyOnWriteArrayList in project tinker by Tencent.
the class FileUtil method zip.
/**
 * Zip the given entries into a new archive, optionally merging the entries of an existing zip.
 *
 * @param outputZipFullFilename the zip file to create
 * @param inputZipFullFilename  may be null; when null, no entries are copied from an input zip
 * @param zipEntryPathList      entries to add to, or replace in, the output zip
 * @param zipProcessor          optional processor applied to each entry's input stream
 */
public static void zip(String outputZipFullFilename, String inputZipFullFilename, List<ZipEntryPath> zipEntryPathList, ZipProcessor zipProcessor) {
    ZipOutputStream zipOutputStream = null;
    ZipFile zipFile = null;
    Map<String, ZipEntryPath> zipEntryPathMap = new HashMap<String, ZipEntryPath>();
    List<String> needToAddEntryNameList = new CopyOnWriteArrayList<String>();
    if (zipEntryPathList != null) {
        for (ZipEntryPath zipEntryPath : zipEntryPathList) {
            zipEntryPathMap.put(zipEntryPath.zipEntry.getName(), zipEntryPath);
            needToAddEntryNameList.add(zipEntryPath.zipEntry.getName());
        }
    }
    try {
        createFile(outputZipFullFilename);
        zipOutputStream = new ZipOutputStream(new FileOutputStream(outputZipFullFilename));
        if (inputZipFullFilename != null) {
            zipFile = new ZipFile(inputZipFullFilename);
            Enumeration<? extends ZipEntry> enumeration = zipFile.entries();
            while (enumeration.hasMoreElements()) {
                ZipEntry zipEntry = enumeration.nextElement();
                String zipEntryName = zipEntry.getName();
                InputStream inputStream = null;
                if (zipEntryPathMap.containsKey(zipEntryName)) {
                    ZipEntryPath zipEntryPath = zipEntryPathMap.get(zipEntryName);
                    needToAddEntryNameList.remove(zipEntryName);
                    if (zipEntryPath.replace) {
                        zipEntry = zipEntryPath.zipEntry;
                        inputStream = new FileInputStream(zipEntryPath.fullFilename);
                    }
                }
                if (inputStream == null) {
                    inputStream = zipFile.getInputStream(zipEntry);
                    if (zipProcessor != null) {
                        inputStream = zipProcessor.zipEntryProcess(zipEntryName, inputStream);
                    }
                }
                ZipEntry newZipEntry = new ZipEntry(zipEntryName);
                addZipEntry(zipOutputStream, newZipEntry, inputStream);
            }
        }
        for (String zipEntryName : needToAddEntryNameList) {
            ZipEntryPath zipEntryPath = zipEntryPathMap.get(zipEntryName);
            ZipEntry zipEntry = zipEntryPath.zipEntry;
            InputStream inputStream = new FileInputStream(zipEntryPath.fullFilename);
            if (zipProcessor != null) {
                inputStream = zipProcessor.zipEntryProcess(zipEntry.getName(), inputStream);
            }
            addZipEntry(zipOutputStream, zipEntry, inputStream);
        }
    } catch (Exception e) {
        throw new FileUtilException(e);
    } finally {
        try {
            if (zipOutputStream != null) {
                zipOutputStream.finish();
                zipOutputStream.flush();
                zipOutputStream.close();
            }
            if (zipFile != null) {
                zipFile.close();
            }
        } catch (Exception e) {
            throw new FileUtilException(e);
        }
    }
}
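A small illustrative sketch (not tinker code) of the collection choice in isolation: CopyOnWriteArrayList tolerates removals while the same list is being iterated, which an ArrayList would reject with ConcurrentModificationException.

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

// Sketch only: each iterator over a CopyOnWriteArrayList works on its own
// snapshot of the backing array, so the list can be modified mid-iteration.
public class CowRemovalSketch {
    public static void main(String[] args) {
        List<String> pending = new CopyOnWriteArrayList<String>();
        pending.add("classes.dex");
        pending.add("res/values.xml");
        pending.add("AndroidManifest.xml");

        for (String name : pending) {
            // With ArrayList this remove() inside the loop would throw
            // ConcurrentModificationException; here the loop keeps reading
            // the snapshot taken when it started.
            if (name.endsWith(".xml")) {
                pending.remove(name);
            }
        }
        System.out.println(pending); // [classes.dex]
    }
}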
use of java.util.concurrent.CopyOnWriteArrayList in project redisson by redisson.
the class RedissonBatchTest method testOrdering.
@Test
public void testOrdering() throws InterruptedException {
    ExecutorService e = Executors.newFixedThreadPool(16);
    final RBatch batch = redisson.createBatch();
    final AtomicLong index = new AtomicLong(-1);
    final List<RFuture<Long>> futures = new CopyOnWriteArrayList<>();
    for (int i = 0; i < 500; i++) {
        futures.add(null);
    }
    for (int i = 0; i < 500; i++) {
        final int j = i;
        e.execute(new Runnable() {
            @Override
            public void run() {
                synchronized (RedissonBatchTest.this) {
                    int i = (int) index.incrementAndGet();
                    int ind = j % 3;
                    RFuture<Long> f1 = batch.getAtomicLong("test" + ind).addAndGetAsync(j);
                    futures.set(i, f1);
                }
            }
        });
    }
    e.shutdown();
    Assert.assertTrue(e.awaitTermination(30, TimeUnit.SECONDS));
    List<?> s = batch.execute();
    int i = 0;
    for (Object element : s) {
        RFuture<Long> a = futures.get(i);
        Assert.assertEquals(a.getNow(), element);
        i++;
    }
}
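The test pre-fills the list with null placeholders so each worker thread can write to its own index. Here is a standalone sketch of that idiom with illustrative names and no Redisson dependency:

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

// Sketch only: reserve one slot per task so worker threads can call
// set(index, value) without racing on the list's size.
public class SlotListSketch {
    public static void main(String[] args) throws InterruptedException {
        final int tasks = 100;
        final List<Integer> results = new CopyOnWriteArrayList<>();
        for (int i = 0; i < tasks; i++) {
            results.add(null); // placeholder slot for task i
        }

        ExecutorService pool = Executors.newFixedThreadPool(8);
        for (int i = 0; i < tasks; i++) {
            final int slot = i;
            pool.execute(() -> results.set(slot, slot * slot)); // each thread writes only its own slot
        }
        pool.shutdown();
        pool.awaitTermination(30, TimeUnit.SECONDS);

        System.out.println(results.get(10)); // 100
    }
}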
use of java.util.concurrent.CopyOnWriteArrayList in project Prism-Bukkit by prism.
the class PurgeManager method run.
/**
 *
 */
@Override
public void run() {
    Prism.log("Scheduled purge executor beginning new run...");
    if (!purgeRules.isEmpty()) {
        final CopyOnWriteArrayList<QueryParameters> paramList = new CopyOnWriteArrayList<QueryParameters>();
        for (final String purgeArgs : purgeRules) {
            // Process and validate all of the arguments
            final QueryParameters parameters = PreprocessArgs.process(plugin, null, purgeArgs.split(" "), PrismProcessType.DELETE, 0, false);
            if (parameters == null) {
                Prism.log("Invalid parameters for database purge: " + purgeArgs);
                continue;
            }
            if (parameters.getFoundArgs().size() > 0) {
                parameters.setStringFromRawArgs(purgeArgs.split(" "), 0);
                paramList.add(parameters);
            }
        }
        if (paramList.size() > 0) {
            // Identify the minimum id for chunking
            final int minId = PurgeChunkingUtil.getMinimumPrimaryKey();
            if (minId == 0) {
                Prism.log("No minimum primary key could be found for purge chunking.");
                return;
            }
            // Identify the maximum id for chunking
            final int maxId = PurgeChunkingUtil.getMaximumPrimaryKey();
            if (maxId == 0) {
                Prism.log("No maximum primary key could be found for purge chunking.");
                return;
            }
            int purge_tick_delay = plugin.getConfig().getInt("prism.purge.batch-tick-delay");
            if (purge_tick_delay < 1) {
                purge_tick_delay = 20;
            }
            /*
             * We're going to cycle through the param rules, one rule at a
             * time, in a single async task. This task will reschedule itself
             * when each purge cycle has completed and records remain.
             */
            Prism.log("Beginning prism database purge cycle. Will be performed in batches so we don't tie up the db...");
            deleteTask = Bukkit.getServer().getScheduler().runTaskLaterAsynchronously(plugin, new PurgeTask(plugin, paramList, purge_tick_delay, minId, maxId, new LogPurgeCallback()), purge_tick_delay);
        }
    } else {
        Prism.log("Purge rules are empty, not purging anything.");
    }
}
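A plain-Java sketch (no Bukkit APIs, illustrative rule strings) of the hand-off pattern used above: the rule list is built on one thread and then iterated by a background task, which is safe because CopyOnWriteArrayList iterators work on a snapshot.

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

// Sketch only: the background task iterates a snapshot of the rules, so the
// scheduling thread may add or remove rules without disturbing the iteration.
public class PurgeHandoffSketch {
    public static void main(String[] args) throws InterruptedException {
        final List<String> rules = new CopyOnWriteArrayList<>();
        rules.add("before:8w");
        rules.add("a:break before:4w");

        Thread purgeTask = new Thread(() -> {
            for (String rule : rules) {       // snapshot iteration
                System.out.println("purging with rule: " + rule);
            }
        });
        purgeTask.start();

        rules.add("a:explosion before:1w");   // safe even while the task iterates
        purgeTask.join();
    }
}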
use of java.util.concurrent.CopyOnWriteArrayList in project robovm by robovm.
the class ConcurrentCloseTest method test_read_multiple.
public void test_read_multiple() throws Throwable {
    SilentServer ss = new SilentServer();
    final Socket s = new Socket();
    s.connect(ss.getLocalSocketAddress());
    // We want to test that we unblock *all* the threads blocked on a socket, not just one.
    // We know the implementation uses the same mechanism for all blocking calls, so we just
    // test read(2) because it's the easiest to test. (recv(2), for example, is only accessible
    // from Java via a synchronized method.)
    final ArrayList<Thread> threads = new ArrayList<Thread>();
    final List<Throwable> thrownExceptions = new CopyOnWriteArrayList<Throwable>();
    for (int i = 0; i < 10; ++i) {
        Thread t = new Thread(new Runnable() {
            public void run() {
                try {
                    try {
                        System.err.println("read...");
                        int i = s.getInputStream().read();
                        fail("read returned: " + i);
                    } catch (SocketException expected) {
                        assertEquals("Socket closed", expected.getMessage());
                    }
                } catch (Throwable ex) {
                    thrownExceptions.add(ex);
                }
            }
        });
        threads.add(t);
    }
    for (Thread t : threads) {
        t.start();
    }
    new Killer(s).start();
    for (Thread t : threads) {
        t.join();
    }
    for (Throwable exception : thrownExceptions) {
        throw exception;
    }
    ss.close();
}
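A standalone sketch of the exception-collection idiom by itself; the class name is illustrative. Worker threads cannot fail a JUnit test directly, so each records its Throwable in a thread-safe list that the main thread rethrows after join().

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

// Sketch only: CopyOnWriteArrayList lets many worker threads record failures
// concurrently; the main thread inspects the list once all workers have joined.
public class ThreadFailureCollectorSketch {
    public static void main(String[] args) throws Throwable {
        final List<Throwable> thrown = new CopyOnWriteArrayList<>();
        List<Thread> threads = new ArrayList<>();

        for (int i = 0; i < 10; i++) {
            Thread t = new Thread(() -> {
                try {
                    // ... the actual work that might fail goes here ...
                } catch (Throwable ex) {
                    thrown.add(ex); // safe to add from many threads at once
                }
            });
            threads.add(t);
            t.start();
        }
        for (Thread t : threads) {
            t.join();
        }
        for (Throwable ex : thrown) {
            throw ex; // surface the first recorded failure
        }
    }
}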