Use of java.util.Random in project flink by apache.
In the class LargeRecordHandlerITCase, method testRecordHandlerCompositeKey.
@Test
public void testRecordHandlerCompositeKey() {
    final IOManager ioMan = new IOManagerAsync();
    final int PAGE_SIZE = 4 * 1024;
    final int NUM_PAGES = 1000;
    final int NUM_RECORDS = 10;
    try {
        final MemoryManager memMan = new MemoryManager(NUM_PAGES * PAGE_SIZE, 1, PAGE_SIZE, MemoryType.HEAP, true);
        final AbstractInvokable owner = new DummyInvokable();
        final List<MemorySegment> initialMemory = memMan.allocatePages(owner, 6);
        final List<MemorySegment> sortMemory = memMan.allocatePages(owner, NUM_PAGES - 6);
        final TypeInformation<?>[] types = new TypeInformation<?>[] {
            BasicTypeInfo.LONG_TYPE_INFO,
            new ValueTypeInfo<SomeVeryLongValue>(SomeVeryLongValue.class),
            BasicTypeInfo.BYTE_TYPE_INFO
        };
        final TupleTypeInfo<Tuple3<Long, SomeVeryLongValue, Byte>> typeInfo =
            new TupleTypeInfo<Tuple3<Long, SomeVeryLongValue, Byte>>(types);
        final TypeSerializer<Tuple3<Long, SomeVeryLongValue, Byte>> serializer =
            typeInfo.createSerializer(new ExecutionConfig());
        final TypeComparator<Tuple3<Long, SomeVeryLongValue, Byte>> comparator =
            typeInfo.createComparator(new int[] { 2, 0 }, new boolean[] { true, true }, 0, new ExecutionConfig());
        LargeRecordHandler<Tuple3<Long, SomeVeryLongValue, Byte>> handler =
            new LargeRecordHandler<Tuple3<Long, SomeVeryLongValue, Byte>>(serializer, comparator, ioMan, memMan, initialMemory, owner, 128);
        assertFalse(handler.hasData());
        // add the test data
        Random rnd = new Random();
        for (int i = 0; i < NUM_RECORDS; i++) {
            long val = rnd.nextLong();
            handler.addRecord(new Tuple3<Long, SomeVeryLongValue, Byte>(val, new SomeVeryLongValue((int) val), (byte) val));
            assertTrue(handler.hasData());
        }
        MutableObjectIterator<Tuple3<Long, SomeVeryLongValue, Byte>> sorted = handler.finishWriteAndSortKeys(sortMemory);
        try {
            handler.addRecord(new Tuple3<Long, SomeVeryLongValue, Byte>(92L, null, (byte) 1));
            fail("should throw an exception");
        } catch (IllegalStateException e) {
            // expected
        }
        Tuple3<Long, SomeVeryLongValue, Byte> previous = null;
        Tuple3<Long, SomeVeryLongValue, Byte> next;
        while ((next = sorted.next(null)) != null) {
            // key and value must be equal
            assertTrue(next.f0.intValue() == next.f1.val());
            assertTrue(next.f0.byteValue() == next.f2);
            // order must be correct
            if (previous != null) {
                assertTrue(previous.f2 <= next.f2);
                assertTrue(previous.f2.byteValue() != next.f2.byteValue() || previous.f0 <= next.f0);
            }
            previous = next;
        }
        handler.close();
        assertFalse(handler.hasData());
        handler.close();
        try {
            handler.addRecord(new Tuple3<Long, SomeVeryLongValue, Byte>(92L, null, (byte) 1));
            fail("should throw an exception");
        } catch (IllegalStateException e) {
            // expected
        }
        assertTrue(memMan.verifyEmpty());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    } finally {
        ioMan.shutdown();
    }
}
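All three tuple fields above are derived from a single random long, which is why the consistency assertions in the read loop hold: a narrowing cast keeps the low-order bits of the value, so (int) val and (byte) val agree with intValue() and byteValue() on the boxed key. A minimal standalone sketch of that relationship (the class name below is illustrative, not Flink code):

import java.util.Random;

public class NarrowingCastDemo {
    public static void main(String[] args) {
        Random rnd = new Random(42L); // fixed seed only so the run is repeatable
        for (int i = 0; i < 5; i++) {
            long val = rnd.nextLong();
            // A narrowing cast keeps the low-order 32 / 8 bits, so it always
            // matches intValue() / byteValue() on the boxed Long -- the
            // relation the test's assertions rely on.
            System.out.println(((int) val == Long.valueOf(val).intValue())
                    + " " + ((byte) val == Long.valueOf(val).byteValue()));
        }
    }
}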
Use of java.util.Random in project flink by apache.
In the class LargeRecordHandlerTest, method testRecordHandlerCompositeKey.
@Test
public void testRecordHandlerCompositeKey() {
    final IOManager ioMan = new IOManagerAsync();
    final int PAGE_SIZE = 4 * 1024;
    final int NUM_PAGES = 24;
    final int NUM_RECORDS = 25000;
    try {
        final MemoryManager memMan = new MemoryManager(NUM_PAGES * PAGE_SIZE, 1, PAGE_SIZE, MemoryType.HEAP, true);
        final AbstractInvokable owner = new DummyInvokable();
        final List<MemorySegment> initialMemory = memMan.allocatePages(owner, 6);
        final List<MemorySegment> sortMemory = memMan.allocatePages(owner, NUM_PAGES - 6);
        final TupleTypeInfo<Tuple3<Long, String, Byte>> typeInfo =
            (TupleTypeInfo<Tuple3<Long, String, Byte>>) TypeInfoParser.<Tuple3<Long, String, Byte>>parse("Tuple3<Long, String, Byte>");
        final TypeSerializer<Tuple3<Long, String, Byte>> serializer = typeInfo.createSerializer(new ExecutionConfig());
        final TypeComparator<Tuple3<Long, String, Byte>> comparator =
            typeInfo.createComparator(new int[] { 2, 0 }, new boolean[] { true, true }, 0, new ExecutionConfig());
        LargeRecordHandler<Tuple3<Long, String, Byte>> handler =
            new LargeRecordHandler<Tuple3<Long, String, Byte>>(serializer, comparator, ioMan, memMan, initialMemory, owner, 128);
        assertFalse(handler.hasData());
        // add the test data
        Random rnd = new Random();
        for (int i = 0; i < NUM_RECORDS; i++) {
            long val = rnd.nextLong();
            handler.addRecord(new Tuple3<Long, String, Byte>(val, String.valueOf(val), (byte) val));
            assertTrue(handler.hasData());
        }
        MutableObjectIterator<Tuple3<Long, String, Byte>> sorted = handler.finishWriteAndSortKeys(sortMemory);
        try {
            handler.addRecord(new Tuple3<Long, String, Byte>(92L, "peter pepper", (byte) 1));
            fail("should throw an exception");
        } catch (IllegalStateException e) {
            // expected
        }
        Tuple3<Long, String, Byte> previous = null;
        Tuple3<Long, String, Byte> next;
        while ((next = sorted.next(null)) != null) {
            // key and value must be equal
            assertTrue(next.f0.equals(Long.parseLong(next.f1)));
            assertTrue(next.f0.byteValue() == next.f2);
            // order must be correct
            if (previous != null) {
                assertTrue(previous.f2 <= next.f2);
                assertTrue(previous.f2.byteValue() != next.f2.byteValue() || previous.f0 <= next.f0);
            }
            previous = next;
        }
        handler.close();
        assertFalse(handler.hasData());
        handler.close();
        try {
            handler.addRecord(new Tuple3<Long, String, Byte>(92L, "peter pepper", (byte) 1));
            fail("should throw an exception");
        } catch (IllegalStateException e) {
            // expected
        }
        assertTrue(memMan.verifyEmpty());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    } finally {
        ioMan.shutdown();
    }
}
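Both Flink tests assert the same composite order on the sorted output: ascending by the Byte field (key position 2) first, then ascending by the Long field (key position 0). The previous/next loop is equivalent to checking that consecutive records are non-decreasing under a comparator like the sketch below (the Triple record is a stand-in for Flink's Tuple3, not part of its API):

import java.util.Comparator;

public class CompositeKeyOrder {
    // Stand-in for Tuple3<Long, String, Byte>; purely illustrative.
    record Triple(Long f0, String f1, Byte f2) {}

    // Sort by f2 first, then by f0, both ascending -- the order the tests assert.
    static final Comparator<Triple> BY_F2_THEN_F0 =
        Comparator.comparing(Triple::f2).thenComparing(Triple::f0);

    public static void main(String[] args) {
        Triple a = new Triple(5L, "5", (byte) 1);
        Triple b = new Triple(3L, "3", (byte) 2);
        System.out.println(BY_F2_THEN_F0.compare(a, b) < 0); // true: byte 1 sorts before byte 2
    }
}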
Use of java.util.Random in project flink by apache.
In the class PairGenerator, method reset.
public void reset() {
    this.random = new Random(seed);
    this.counter = 0;
}
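reset() works because java.util.Random is deterministic for a given seed: re-creating the generator with the same seed replays exactly the same sequence, so the generator can reproduce the pairs it has already emitted. A small standalone illustration of that property:

import java.util.Random;

public class SeedReplayDemo {
    public static void main(String[] args) {
        long seed = 12345L;
        Random first = new Random(seed);
        Random second = new Random(seed); // same seed => identical sequence
        for (int i = 0; i < 5; i++) {
            long a = first.nextLong();
            long b = second.nextLong();
            System.out.println(a + " == " + b + " : " + (a == b)); // always true
        }
    }
}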
Use of java.util.Random in project hadoop by apache.
In the class TestTrash, method verifyTrashPermission.
/**
 * Create a bunch of files with different permissions. After they are
 * moved to trash, verify that each file is at the expected location in
 * the trash directory and that its permission is preserved.
 *
 * @throws IOException
 */
public static void verifyTrashPermission(FileSystem fs, Configuration conf) throws IOException {
    Path caseRoot = new Path(GenericTestUtils.getTempPath("testTrashPermission"));
    try (FileSystem fileSystem = fs) {
        Trash trash = new Trash(fileSystem, conf);
        FileSystemTestWrapper wrapper = new FileSystemTestWrapper(fileSystem);
        short[] filePermssions = {
            (short) 0600, (short) 0644, (short) 0660, (short) 0700,
            (short) 0750, (short) 0755, (short) 0775, (short) 0777
        };
        for (int i = 0; i < filePermssions.length; i++) {
            // Set a different permission on each file
            FsPermission fsPermission = new FsPermission(filePermssions[i]);
            Path file = new Path(caseRoot, "file" + i);
            byte[] randomBytes = new byte[new Random().nextInt(10)];
            wrapper.writeFile(file, randomBytes);
            wrapper.setPermission(file, fsPermission);
            // Move the file to trash
            trash.moveToTrash(file);
            // Verify the file is moved to trash, at the expected location
            Path trashDir = trash.getCurrentTrashDir(file);
            if (!file.isAbsolute()) {
                file = wrapper.makeQualified(file);
            }
            Path fileInTrash = Path.mergePaths(trashDir, file);
            FileStatus fstat = wrapper.getFileStatus(fileInTrash);
            assertTrue(String.format("File %s is not moved to trash", fileInTrash.toString()), wrapper.exists(fileInTrash));
            // Verify the permission has not changed
            assertTrue(String.format("Expected file: %s is %s, but actual is %s", fileInTrash.toString(), fsPermission.toString(), fstat.getPermission().toString()), fstat.getPermission().equals(fsPermission));
        }
        // Verify the trash directory can be removed
        Path trashRoot = trash.getCurrentTrashDir();
        assertTrue(wrapper.delete(trashRoot, true));
    }
}
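Note that new Random().nextInt(10) yields a length in the range 0-9, so some of the test files may be empty, and the newly allocated array is zero-filled. If random contents (not just a random length) were wanted, Random.nextBytes fills the buffer, as in this small sketch (illustrative, not part of the Hadoop test):

import java.util.Random;

public class RandomPayloadDemo {
    public static void main(String[] args) {
        Random rnd = new Random();
        // nextInt(10) returns 0..9 inclusive, so the payload may be empty.
        byte[] payload = new byte[rnd.nextInt(10)];
        // Fill the buffer with pseudo-random bytes instead of leaving it zeroed.
        rnd.nextBytes(payload);
        System.out.println("payload length = " + payload.length);
    }
}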
Use of java.util.Random in project hadoop by apache.
In the class AbstractContractSeekTest, method testRandomSeeks.
/**
 * Lifted from TestLocalFileSystem:
 * Regression test for HADOOP-9307: BufferedFSInputStream returning
 * wrong results after certain sequences of seeks and reads.
 */
@Test
public void testRandomSeeks() throws Throwable {
    int limit = getContract().getLimit(TEST_RANDOM_SEEK_COUNT, DEFAULT_RANDOM_SEEK_COUNT);
    describe("Testing " + limit + " random seeks");
    int filesize = 10 * 1024;
    byte[] buf = dataset(filesize, 0, 255);
    Path randomSeekFile = path("testrandomseeks.bin");
    createFile(getFileSystem(), randomSeekFile, false, buf);
    Random r = new Random();
    // Record the sequence of seeks and reads which trigger a failure.
    int[] seeks = new int[10];
    int[] reads = new int[10];
    try (FSDataInputStream stm = getFileSystem().open(randomSeekFile)) {
        for (int i = 0; i < limit; i++) {
            int seekOff = r.nextInt(buf.length);
            int toRead = r.nextInt(Math.min(buf.length - seekOff, 32000));
            seeks[i % seeks.length] = seekOff;
            reads[i % reads.length] = toRead;
            verifyRead(stm, buf, seekOff, toRead);
        }
    } catch (AssertionError afe) {
        StringBuilder sb = new StringBuilder();
        sb.append("Sequence of actions:\n");
        for (int j = 0; j < seeks.length; j++) {
            sb.append("seek @ ").append(seeks[j]).append(" ")
              .append("read ").append(reads[j]).append("\n");
        }
        LOG.error(sb.toString());
        throw afe;
    }
}
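The i % seeks.length indexing keeps only the most recent ten seek/read pairs, so the failure log stays bounded no matter how large limit is. A minimal standalone sketch of that ring-buffer recording pattern (illustrative only, not Hadoop code):

import java.util.Random;

public class RecentActionsDemo {
    public static void main(String[] args) {
        Random r = new Random();
        int[] recent = new int[10]; // ring buffer: only the last 10 values survive
        int iterations = 1000;
        for (int i = 0; i < iterations; i++) {
            recent[i % recent.length] = r.nextInt(10_000);
        }
        // On a failure, only these ten most recent actions would be logged.
        for (int value : recent) {
            System.out.println("recent action: " + value);
        }
    }
}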