Search in sources :

Example 61 with Random

Use of java.util.Random in project flink by apache, in class LargeRecordHandlerITCase, method testRecordHandlerCompositeKey.

@Test
public void testRecordHandlerCompositeKey() {
    final IOManager ioMan = new IOManagerAsync();
    final int PAGE_SIZE = 4 * 1024;
    final int NUM_PAGES = 1000;
    final int NUM_RECORDS = 10;
    try {
        final MemoryManager memMan = new MemoryManager(NUM_PAGES * PAGE_SIZE, 1, PAGE_SIZE, MemoryType.HEAP, true);
        final AbstractInvokable owner = new DummyInvokable();
        final List<MemorySegment> initialMemory = memMan.allocatePages(owner, 6);
        final List<MemorySegment> sortMemory = memMan.allocatePages(owner, NUM_PAGES - 6);
        final TypeInformation<?>[] types = new TypeInformation<?>[] { BasicTypeInfo.LONG_TYPE_INFO, new ValueTypeInfo<SomeVeryLongValue>(SomeVeryLongValue.class), BasicTypeInfo.BYTE_TYPE_INFO };
        final TupleTypeInfo<Tuple3<Long, SomeVeryLongValue, Byte>> typeInfo = new TupleTypeInfo<Tuple3<Long, SomeVeryLongValue, Byte>>(types);
        final TypeSerializer<Tuple3<Long, SomeVeryLongValue, Byte>> serializer = typeInfo.createSerializer(new ExecutionConfig());
        final TypeComparator<Tuple3<Long, SomeVeryLongValue, Byte>> comparator = typeInfo.createComparator(new int[] { 2, 0 }, new boolean[] { true, true }, 0, new ExecutionConfig());
        LargeRecordHandler<Tuple3<Long, SomeVeryLongValue, Byte>> handler = new LargeRecordHandler<Tuple3<Long, SomeVeryLongValue, Byte>>(serializer, comparator, ioMan, memMan, initialMemory, owner, 128);
        assertFalse(handler.hasData());
        // add the test data
        Random rnd = new Random();
        for (int i = 0; i < NUM_RECORDS; i++) {
            long val = rnd.nextLong();
            handler.addRecord(new Tuple3<Long, SomeVeryLongValue, Byte>(val, new SomeVeryLongValue((int) val), (byte) val));
            assertTrue(handler.hasData());
        }
        MutableObjectIterator<Tuple3<Long, SomeVeryLongValue, Byte>> sorted = handler.finishWriteAndSortKeys(sortMemory);
        try {
            handler.addRecord(new Tuple3<Long, SomeVeryLongValue, Byte>(92L, null, (byte) 1));
            fail("should throw an exception");
        } catch (IllegalStateException e) {
        // expected
        }
        Tuple3<Long, SomeVeryLongValue, Byte> previous = null;
        Tuple3<Long, SomeVeryLongValue, Byte> next;
        while ((next = sorted.next(null)) != null) {
            // key and value must be equal
            assertTrue(next.f0.intValue() == next.f1.val());
            assertTrue(next.f0.byteValue() == next.f2);
            // order must be correct
            if (previous != null) {
                assertTrue(previous.f2 <= next.f2);
                assertTrue(previous.f2.byteValue() != next.f2.byteValue() || previous.f0 <= next.f0);
            }
            previous = next;
        }
        handler.close();
        assertFalse(handler.hasData());
        // closing a second time must be a safe no-op
        handler.close();
        try {
            handler.addRecord(new Tuple3<Long, SomeVeryLongValue, Byte>(92L, null, (byte) 1));
            fail("should throw an exception");
        } catch (IllegalStateException e) {
        // expected
        }
        assertTrue(memMan.verifyEmpty());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    } finally {
        ioMan.shutdown();
    }
}
Also used : ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) AbstractInvokable(org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) IOManagerAsync(org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync) Random(java.util.Random) DummyInvokable(org.apache.flink.runtime.operators.testutils.DummyInvokable) ValueTypeInfo(org.apache.flink.api.java.typeutils.ValueTypeInfo) IOManager(org.apache.flink.runtime.io.disk.iomanager.IOManager) MemoryManager(org.apache.flink.runtime.memory.MemoryManager) MemorySegment(org.apache.flink.core.memory.MemorySegment) TupleTypeInfo(org.apache.flink.api.java.typeutils.TupleTypeInfo) IOException(java.io.IOException) Tuple3(org.apache.flink.api.java.tuple.Tuple3) Test(org.junit.Test)
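
Note that this test relies on a helper value type, SomeVeryLongValue, whose definition is not shown here: its serialized form is deliberately large so that every record takes the large-record path, and it carries the int that the assertion next.f0.intValue() == next.f1.val() checks. A minimal sketch of such a type, assuming the standard org.apache.flink.types.Value interface (an illustration only, not the actual class from the Flink test):

import java.io.IOException;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
import org.apache.flink.types.Value;

// Sketch only: a Value whose serialized form is intentionally large, so every record
// is treated as a "large record". The int carried inside is what the test compares
// against the tuple's long key.
public class SomeVeryLongValue implements Value {

    private static final byte[] PADDING = new byte[500 * 1024]; // ~500 KB of filler (size assumed)

    private int val;

    public SomeVeryLongValue() {} // nullary constructor required by ValueTypeInfo

    public SomeVeryLongValue(int val) {
        this.val = val;
    }

    public int val() {
        return val;
    }

    @Override
    public void write(DataOutputView out) throws IOException {
        out.writeInt(val);
        out.write(PADDING); // blow up the serialized size
    }

    @Override
    public void read(DataInputView in) throws IOException {
        val = in.readInt();
        in.skipBytesToRead(PADDING.length); // skip the filler on read
    }
}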

Example 62 with Random

Use of java.util.Random in project flink by apache, in class LargeRecordHandlerTest, method testRecordHandlerCompositeKey.

@Test
public void testRecordHandlerCompositeKey() {
    final IOManager ioMan = new IOManagerAsync();
    final int PAGE_SIZE = 4 * 1024;
    final int NUM_PAGES = 24;
    final int NUM_RECORDS = 25000;
    try {
        final MemoryManager memMan = new MemoryManager(NUM_PAGES * PAGE_SIZE, 1, PAGE_SIZE, MemoryType.HEAP, true);
        final AbstractInvokable owner = new DummyInvokable();
        final List<MemorySegment> initialMemory = memMan.allocatePages(owner, 6);
        final List<MemorySegment> sortMemory = memMan.allocatePages(owner, NUM_PAGES - 6);
        final TupleTypeInfo<Tuple3<Long, String, Byte>> typeInfo = (TupleTypeInfo<Tuple3<Long, String, Byte>>) TypeInfoParser.<Tuple3<Long, String, Byte>>parse("Tuple3<Long, String, Byte>");
        final TypeSerializer<Tuple3<Long, String, Byte>> serializer = typeInfo.createSerializer(new ExecutionConfig());
        final TypeComparator<Tuple3<Long, String, Byte>> comparator = typeInfo.createComparator(new int[] { 2, 0 }, new boolean[] { true, true }, 0, new ExecutionConfig());
        LargeRecordHandler<Tuple3<Long, String, Byte>> handler = new LargeRecordHandler<Tuple3<Long, String, Byte>>(serializer, comparator, ioMan, memMan, initialMemory, owner, 128);
        assertFalse(handler.hasData());
        // add the test data
        Random rnd = new Random();
        for (int i = 0; i < NUM_RECORDS; i++) {
            long val = rnd.nextLong();
            handler.addRecord(new Tuple3<Long, String, Byte>(val, String.valueOf(val), (byte) val));
            assertTrue(handler.hasData());
        }
        MutableObjectIterator<Tuple3<Long, String, Byte>> sorted = handler.finishWriteAndSortKeys(sortMemory);
        try {
            handler.addRecord(new Tuple3<Long, String, Byte>(92L, "peter pepper", (byte) 1));
            fail("should throw an exception");
        } catch (IllegalStateException e) {
        // expected
        }
        Tuple3<Long, String, Byte> previous = null;
        Tuple3<Long, String, Byte> next;
        while ((next = sorted.next(null)) != null) {
            // key and value must be equal
            assertTrue(next.f0.equals(Long.parseLong(next.f1)));
            assertTrue(next.f0.byteValue() == next.f2);
            // order must be correct
            if (previous != null) {
                assertTrue(previous.f2 <= next.f2);
                assertTrue(previous.f2.byteValue() != next.f2.byteValue() || previous.f0 <= next.f0);
            }
            previous = next;
        }
        handler.close();
        assertFalse(handler.hasData());
        // closing a second time must be a safe no-op
        handler.close();
        try {
            handler.addRecord(new Tuple3<Long, String, Byte>(92L, "peter pepper", (byte) 1));
            fail("should throw an exception");
        } catch (IllegalStateException e) {
        // expected
        }
        assertTrue(memMan.verifyEmpty());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    } finally {
        ioMan.shutdown();
    }
}
Also used : ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) AbstractInvokable(org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable) IOManagerAsync(org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync) Random(java.util.Random) DummyInvokable(org.apache.flink.runtime.operators.testutils.DummyInvokable) IOManager(org.apache.flink.runtime.io.disk.iomanager.IOManager) MemoryManager(org.apache.flink.runtime.memory.MemoryManager) MemorySegment(org.apache.flink.core.memory.MemorySegment) TupleTypeInfo(org.apache.flink.api.java.typeutils.TupleTypeInfo) Tuple3(org.apache.flink.api.java.tuple.Tuple3) Test(org.junit.Test)
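
The type information in this test is produced by TypeInfoParser from a type string; later Flink versions deprecate that parser, and the same TupleTypeInfo can be built explicitly from the basic type infos. A sketch under that assumption (the rest of the test stays unchanged):

import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;

// Sketch: build the same TupleTypeInfo explicitly instead of parsing a type string.
final class ExplicitTypeInfo {

    static TupleTypeInfo<Tuple3<Long, String, Byte>> tupleTypeInfo() {
        return new TupleTypeInfo<>(
                BasicTypeInfo.LONG_TYPE_INFO,
                BasicTypeInfo.STRING_TYPE_INFO,
                BasicTypeInfo.BYTE_TYPE_INFO);
    }
}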

Example 63 with Random

Use of java.util.Random in project flink by apache, in class PairGenerator, method reset.

public void reset() {
    this.random = new Random(seed);
    this.counter = 0;
}
Also used : Random(java.util.Random)
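
reset() rewinds the generator by discarding the old Random and re-creating it from the stored seed: java.util.Random is fully determined by its seed, so the new instance replays exactly the same sequence of values. A standalone illustration (class name invented for the demo):

import java.util.Random;

public class SeededRandomDemo {

    public static void main(String[] args) {
        long seed = 12345L;
        Random first = new Random(seed);
        Random second = new Random(seed);
        for (int i = 0; i < 3; i++) {
            // Both generators produce identical values in identical order.
            System.out.println(first.nextLong() + " == " + second.nextLong());
        }
    }
}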

Example 64 with Random

Use of java.util.Random in project hadoop by apache, in class TestTrash, method verifyTrashPermission.

/**
   * Create a bunch of files with different permissions; after they are
   * moved to trash, verify that each file lands at the expected location
   * in the trash directory and that its permissions are preserved.
   *
   * @throws IOException
   */
public static void verifyTrashPermission(FileSystem fs, Configuration conf) throws IOException {
    Path caseRoot = new Path(GenericTestUtils.getTempPath("testTrashPermission"));
    try (FileSystem fileSystem = fs) {
        Trash trash = new Trash(fileSystem, conf);
        FileSystemTestWrapper wrapper = new FileSystemTestWrapper(fileSystem);
        short[] filePermissions = { (short) 0600, (short) 0644, (short) 0660, (short) 0700, (short) 0750, (short) 0755, (short) 0775, (short) 0777 };
        for (int i = 0; i < filePermissions.length; i++) {
            // Set a different permission on each file
            FsPermission fsPermission = new FsPermission(filePermissions[i]);
            Path file = new Path(caseRoot, "file" + i);
            byte[] randomBytes = new byte[new Random().nextInt(10)];
            wrapper.writeFile(file, randomBytes);
            wrapper.setPermission(file, fsPermission);
            // Move file to trash
            trash.moveToTrash(file);
            // Verify the file is moved to trash, at expected location
            Path trashDir = trash.getCurrentTrashDir(file);
            if (!file.isAbsolute()) {
                file = wrapper.makeQualified(file);
            }
            Path fileInTrash = Path.mergePaths(trashDir, file);
            FileStatus fstat = wrapper.getFileStatus(fileInTrash);
            assertTrue(String.format("File %s is not moved to trash", fileInTrash.toString()), wrapper.exists(fileInTrash));
            // Verify the permission did not change
            assertTrue(String.format("Expected file: %s is %s, but actual is %s", fileInTrash.toString(), fsPermission.toString(), fstat.getPermission().toString()), fstat.getPermission().equals(fsPermission));
        }
        // Verify the trash directory can be removed
        Path trashRoot = trash.getCurrentTrashDir();
        assertTrue(wrapper.delete(trashRoot, true));
    }
}
Also used : Random(java.util.Random) FsPermission(org.apache.hadoop.fs.permission.FsPermission)
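
The permission values in this test are octal short literals (the leading 0 makes the Java literal octal), so (short) 0750 means rwxr-x---, not decimal 750. A small illustrative snippet, separate from the test, that prints the mapping via FsPermission:

import org.apache.hadoop.fs.permission.FsPermission;

public class PermissionLiteralDemo {

    public static void main(String[] args) {
        short[] modes = { (short) 0600, (short) 0750, (short) 0777 };
        for (short mode : modes) {
            // FsPermission(short) interprets the value as a POSIX mode.
            System.out.println(Integer.toOctalString(mode) + " -> " + new FsPermission(mode));
        }
    }
}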

Example 65 with Random

Use of java.util.Random in project hadoop by apache, in class AbstractContractSeekTest, method testRandomSeeks.

/**
   * Lifted from TestLocalFileSystem:
   * Regression test for HADOOP-9307: BufferedFSInputStream returning
   * wrong results after certain sequences of seeks and reads.
   */
@Test
public void testRandomSeeks() throws Throwable {
    int limit = getContract().getLimit(TEST_RANDOM_SEEK_COUNT, DEFAULT_RANDOM_SEEK_COUNT);
    describe("Testing " + limit + " random seeks");
    int filesize = 10 * 1024;
    byte[] buf = dataset(filesize, 0, 255);
    Path randomSeekFile = path("testrandomseeks.bin");
    createFile(getFileSystem(), randomSeekFile, false, buf);
    Random r = new Random();
    // Record the sequence of seeks and reads which trigger a failure.
    int[] seeks = new int[10];
    int[] reads = new int[10];
    try (FSDataInputStream stm = getFileSystem().open(randomSeekFile)) {
        for (int i = 0; i < limit; i++) {
            int seekOff = r.nextInt(buf.length);
            int toRead = r.nextInt(Math.min(buf.length - seekOff, 32000));
            seeks[i % seeks.length] = seekOff;
            reads[i % reads.length] = toRead;
            verifyRead(stm, buf, seekOff, toRead);
        }
    } catch (AssertionError afe) {
        StringBuilder sb = new StringBuilder();
        sb.append("Sequence of actions:\n");
        for (int j = 0; j < seeks.length; j++) {
            sb.append("seek @ ").append(seeks[j]).append("  ").append("read ").append(reads[j]).append("\n");
        }
        LOG.error(sb.toString());
        throw afe;
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Random(java.util.Random) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) Test(org.junit.Test)
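
verifyRead is a helper inherited from the contract-test base class and is not shown in this snippet; conceptually it seeks to the chosen offset, reads the requested number of bytes and compares them against the reference buffer. A minimal sketch of that check (method name and structure are assumptions, not Hadoop's actual helper):

import java.io.IOException;

import org.apache.hadoop.fs.FSDataInputStream;

import static org.junit.Assert.assertEquals;

// Sketch of the seek-then-read verification the loop above relies on
// (illustrative, not Hadoop's actual helper).
final class SeekReadCheck {

    static void checkSeekAndRead(FSDataInputStream in, byte[] expected,
                                 int seekOff, int toRead) throws IOException {
        in.seek(seekOff);
        byte[] actual = new byte[toRead];
        in.readFully(actual); // read exactly toRead bytes
        for (int i = 0; i < toRead; i++) {
            assertEquals("byte at offset " + (seekOff + i),
                    expected[seekOff + i], actual[i]);
        }
    }
}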

Aggregations

Random (java.util.Random): 4728
Test (org.junit.Test): 1273
ArrayList (java.util.ArrayList): 602
IOException (java.io.IOException): 313
HashMap (java.util.HashMap): 242
File (java.io.File): 209
List (java.util.List): 154
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 151
ByteArrayInputStream (java.io.ByteArrayInputStream): 134
HashSet (java.util.HashSet): 129
ByteBuffer (java.nio.ByteBuffer): 123
Test (org.testng.annotations.Test): 121
Path (org.apache.hadoop.fs.Path): 116
Map (java.util.Map): 106
QuickTest (com.hazelcast.test.annotation.QuickTest): 99
ParallelTest (com.hazelcast.test.annotation.ParallelTest): 94
CountDownLatch (java.util.concurrent.CountDownLatch): 93
Configuration (org.apache.hadoop.conf.Configuration): 88
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 79
Before (org.junit.Before): 78