Example usage of java.util.Random in the Apache Flink project, taken from the class AtomicDisposableReferenceCounterTest, method testConcurrentIncrementAndDecrement.
@Test
public void testConcurrentIncrementAndDecrement() throws InterruptedException, ExecutionException, TimeoutException {
    final Random random = new Random();
    final ExecutorService executor = Executors.newFixedThreadPool(2);
    try {
        final MockIncrementer incrementer = new MockIncrementer();
        final MockDecrementer decrementer = new MockDecrementer();

        // Run many rounds so the two tasks have repeated chances to race.
        for (int round = 0; round < 256; round++) {
            final AtomicDisposableReferenceCounter counter = new AtomicDisposableReferenceCounter();
            incrementer.setCounter(counter);
            decrementer.setCounter(counter);
            counter.increment();

            // Submission order matters: the task submitted first usually wins
            // the race, so randomize which one goes first each round.
            final boolean incrementGoesFirst = random.nextBoolean();
            final Future<Boolean> firstResult;
            final Future<Boolean> secondResult;
            if (incrementGoesFirst) {
                firstResult = executor.submit(incrementer);
                secondResult = executor.submit(decrementer);
            } else {
                firstResult = executor.submit(decrementer);
                secondResult = executor.submit(incrementer);
            }

            // Exactly one of the two tasks must succeed — XOR enforces that.
            assertTrue(firstResult.get() ^ secondResult.get());
        }
    } finally {
        executor.shutdownNow();
    }
}
Example usage of java.util.Random in the Apache Flink project, taken from the class AbstractStreamOperatorTest, method getKeyInKeyGroupRange.
/**
 * Finds a random key whose assigned key group falls inside the given
 * {@link KeyGroupRange}.
 *
 * <p>Uses rejection sampling: draws random ints until one maps into the
 * range. This terminates quickly for any non-empty range, but note it
 * would spin forever for an empty range.
 *
 * @param range the key-group range the returned key must map into
 * @param maxParallelism the maximum parallelism used for key-group assignment
 * @return a key contained in {@code range}
 */
private static int getKeyInKeyGroupRange(KeyGroupRange range, int maxParallelism) {
    // Use the default seed (seed uniquifier + nanoTime) rather than
    // System.currentTimeMillis(): two calls within the same millisecond
    // would otherwise produce identical "random" key sequences.
    Random rand = new Random();
    int result = rand.nextInt();
    while (!range.contains(KeyGroupRangeAssignment.assignToKeyGroup(result, maxParallelism))) {
        result = rand.nextInt();
    }
    return result;
}
Example usage of java.util.Random in the Apache Flink project, taken from the class HeapInternalTimerServiceTest, method getKeyInKeyGroup.
/**
 * Finds a random key that is assigned to the given key group.
 *
 * <p>Uses rejection sampling: draws random ints until one maps to the
 * requested key group. Terminates quickly for any valid key group under
 * the given {@code maxParallelism}.
 *
 * @param keyGroup the key group the returned key must be assigned to
 * @param maxParallelism the maximum parallelism used for key-group assignment
 * @return a key assigned to {@code keyGroup}
 */
private static int getKeyInKeyGroup(int keyGroup, int maxParallelism) {
    // Use the default seed (seed uniquifier + nanoTime) rather than
    // System.currentTimeMillis(): two calls within the same millisecond
    // would otherwise produce identical "random" key sequences.
    Random rand = new Random();
    int result = rand.nextInt();
    while (KeyGroupRangeAssignment.assignToKeyGroup(result, maxParallelism) != keyGroup) {
        result = rand.nextInt();
    }
    return result;
}
Example usage of java.util.Random in the Apache Flink project, taken from the class KeyMapTest, method testPutAndGetRandom.
/**
 * Inserts a random subset of 10,000 key/value pairs into a {@link KeyMap}
 * and a {@link HashMap} ground truth, then replays the same random
 * sequence (via seed reset) to verify lookups agree with the ground truth.
 */
@Test
public void testPutAndGetRandom() throws Exception {
    // Let any unexpected exception propagate: JUnit then reports the full
    // stack trace, unlike the old catch/printStackTrace/fail(getMessage())
    // pattern, which lost the trace and could even call fail(null).
    final KeyMap<Integer, Integer> map = new KeyMap<>();
    final Random rnd = new Random();

    // Draw a random seed, then re-seed so the insertion pass and the
    // verification pass below see the exact same sequence of values.
    final long seed = rnd.nextLong();
    final int numElements = 10000;

    final HashMap<Integer, Integer> groundTruth = new HashMap<>();

    // Pass 1: insert a random ~50% subset of the generated pairs.
    rnd.setSeed(seed);
    for (int i = 0; i < numElements; i++) {
        Integer key = rnd.nextInt();
        Integer value = rnd.nextInt();

        if (rnd.nextBoolean()) {
            groundTruth.put(key, value);
            map.put(key, value);
        }
    }

    // Pass 2: replay the identical sequence and verify lookups.
    rnd.setSeed(seed);
    for (int i = 0; i < numElements; i++) {
        Integer key = rnd.nextInt();

        // Consume the value and the insert/skip flag to stay in lockstep
        // with pass 1; evaluating them directly is tricky due to duplicate
        // keys (a later put overwrites an earlier one).
        rnd.nextInt();
        rnd.nextBoolean();

        Integer expected = groundTruth.get(key);
        if (expected == null) {
            assertNull(map.get(key));
        } else {
            Integer contained = map.get(key);
            assertNotNull(contained);
            assertEquals(expected, contained);
        }
    }
}
Example usage of java.util.Random in the Apache Flink project, taken from the class SpilledBufferOrEventSequenceTest, method testMultipleSequences.
@Test
public void testMultipleSequences() {
    // Writes two independent spilled sequences — one into the shared
    // 'fileChannel' test fixture, one into a fresh temp file — then reads
    // both back and checks that each sequence reproduces exactly what was
    // written into it, independently of the other.
    File secondFile = null;
    FileChannel secondChannel = null;

    try {
        // create the second file channel
        secondFile = File.createTempFile("testdata", "tmp");
        secondChannel = new RandomAccessFile(secondFile, "rw").getChannel();

        // 'rnd' drives event generation and the event/buffer coin flips;
        // 'bufferRnd' drives buffer sizes and channel indices. bufferRnd is
        // re-seeded with the same seed before validation so the reader can
        // reproduce the exact size/channel sequence that was written.
        final Random rnd = new Random();
        final Random bufferRnd = new Random();

        final long bufferSeed = rnd.nextLong();
        bufferRnd.setSeed(bufferSeed);

        final int numEventsAndBuffers1 = 272;
        final int numEventsAndBuffers2 = 151;

        final int numChannels = 1656;

        // Expected events, in write order, for each sequence.
        final ArrayList<BufferOrEvent> events1 = new ArrayList<BufferOrEvent>(128);
        final ArrayList<BufferOrEvent> events2 = new ArrayList<BufferOrEvent>(128);

        // Write sequence 1 into the shared fixture channel: ~5% events,
        // the rest buffers with random (but replayable) size and channel.
        for (int i = 0; i < numEventsAndBuffers1; i++) {
            boolean isEvent = rnd.nextDouble() < 0.05d;
            if (isEvent) {
                events1.add(generateAndWriteEvent(fileChannel, rnd, numChannels));
            } else {
                writeBuffer(fileChannel, bufferRnd.nextInt(pageSize) + 1, bufferRnd.nextInt(numChannels));
            }
        }

        // Write sequence 2 into the second channel; note it continues the
        // same bufferRnd stream, so validation must replay both writes in
        // the same order.
        for (int i = 0; i < numEventsAndBuffers2; i++) {
            boolean isEvent = rnd.nextDouble() < 0.05d;
            if (isEvent) {
                events2.add(generateAndWriteEvent(secondChannel, rnd, numChannels));
            } else {
                writeBuffer(secondChannel, bufferRnd.nextInt(pageSize) + 1, bufferRnd.nextInt(numChannels));
            }
        }

        // reset and create reader
        fileChannel.position(0L);
        secondChannel.position(0L);

        // Replay the buffer-size/channel stream from the start for validation.
        bufferRnd.setSeed(bufferSeed);

        SpilledBufferOrEventSequence seq1 = new SpilledBufferOrEventSequence(tempFile, fileChannel, buffer, pageSize);
        SpilledBufferOrEventSequence seq2 = new SpilledBufferOrEventSequence(secondFile, secondChannel, buffer, pageSize);

        // read and validate the sequence 1
        seq1.open();
        int numEvent = 0;
        for (int i = 0; i < numEventsAndBuffers1; i++) {
            BufferOrEvent next = seq1.getNext();
            if (next.isEvent()) {
                // Events must come back in write order with matching payload
                // and channel index.
                BufferOrEvent expected = events1.get(numEvent++);
                assertEquals(expected.getEvent(), next.getEvent());
                assertEquals(expected.getChannelIndex(), next.getChannelIndex());
            } else {
                // Buffers are checked against the replayed bufferRnd stream.
                validateBuffer(next, bufferRnd.nextInt(pageSize) + 1, bufferRnd.nextInt(numChannels));
            }
        }

        // Sequence must be exhausted and every written event consumed.
        assertNull(seq1.getNext());
        assertEquals(events1.size(), numEvent);

        // read and validate the sequence 2
        seq2.open();
        numEvent = 0;
        for (int i = 0; i < numEventsAndBuffers2; i++) {
            BufferOrEvent next = seq2.getNext();
            if (next.isEvent()) {
                BufferOrEvent expected = events2.get(numEvent++);
                assertEquals(expected.getEvent(), next.getEvent());
                assertEquals(expected.getChannelIndex(), next.getChannelIndex());
            } else {
                validateBuffer(next, bufferRnd.nextInt(pageSize) + 1, bufferRnd.nextInt(numChannels));
            }
        }

        assertNull(seq2.getNext());
        assertEquals(events2.size(), numEvent);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    } finally {
        // Clean up the second file regardless of outcome; close errors are
        // irrelevant to the test result.
        if (secondChannel != null) {
            try {
                secondChannel.close();
            } catch (IOException e) {
                // ignore here
            }
        }
        if (secondFile != null) {
            //noinspection ResultOfMethodCallIgnored
            secondFile.delete();
        }
    }
}
Aggregations