Use of cn.hutool.core.collection.ConcurrentHashSet in project hutool by looly.
Class IdUtilTest, method snowflakeBenchTest2:
@Test
@Ignore
public void snowflakeBenchTest2() {
	final Set<Long> set = new ConcurrentHashSet<>();
	// number of threads
	int threadCount = 100;
	// number of IDs generated per thread
	final int idCountPerThread = 10000;
	final CountDownLatch latch = new CountDownLatch(threadCount);
	for (int i = 0; i < threadCount; i++) {
		ThreadUtil.execute(() -> {
			for (int i1 = 0; i1 < idCountPerThread; i1++) {
				// note: in this variant the Snowflake is obtained via IdUtil.getSnowflake(1, 1) on every iteration
				long id = IdUtil.getSnowflake(1, 1).nextId();
				set.add(id);
				// Console.log("Add new id: {}", id);
			}
			latch.countDown();
		});
	}
	// wait for all threads to finish
	try {
		latch.await();
	} catch (InterruptedException e) {
		throw new UtilException(e);
	}
	// every generated ID must be unique
	Assert.assertEquals(threadCount * idCountPerThread, set.size());
}
Use of cn.hutool.core.collection.ConcurrentHashSet in project hutool by looly.
Class IdUtilTest, method snowflakeBenchTest:
@Test
@Ignore
public void snowflakeBenchTest() {
	final Set<Long> set = new ConcurrentHashSet<>();
	// a single Snowflake instance shared by all threads
	final Snowflake snowflake = IdUtil.getSnowflake(1, 1);
	// number of threads
	int threadCount = 100;
	// number of IDs generated per thread
	final int idCountPerThread = 10000;
	final CountDownLatch latch = new CountDownLatch(threadCount);
	for (int i = 0; i < threadCount; i++) {
		ThreadUtil.execute(() -> {
			for (int i1 = 0; i1 < idCountPerThread; i1++) {
				long id = snowflake.nextId();
				set.add(id);
				// Console.log("Add new id: {}", id);
			}
			latch.countDown();
		});
	}
	// wait for all threads to finish
	try {
		latch.await();
	} catch (InterruptedException e) {
		throw new UtilException(e);
	}
	// every generated ID must be unique
	Assert.assertEquals(threadCount * idCountPerThread, set.size());
}
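For reference, a minimal single-threaded sketch of the same API, assuming Hutool's IdUtil.getSnowflake(workerId, datacenterId) and Snowflake's nextId()/nextIdStr() methods; the variable names are illustrative and not part of the test class above:

// obtain a Snowflake generator for worker 1, datacenter 1
Snowflake snowflake = IdUtil.getSnowflake(1, 1);
// generate a 64-bit snowflake ID
long id = snowflake.nextId();
// generate another ID from the same sequence, returned as a String
String idStr = snowflake.nextIdStr();

The two benchmarks differ only in how the generator is obtained: snowflakeBenchTest shares one Snowflake instance across all threads, while snowflakeBenchTest2 calls IdUtil.getSnowflake(1, 1) on every iteration; both assert that all threadCount * idCountPerThread generated IDs are unique.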