
Example 51 with Random

Use of java.util.Random in project flink by apache.

From class CompactingHashTableTest, method testDoubleResize.

@Test
public void testDoubleResize() {
    // Only CompactingHashTable
    try {
        final int NUM_MEM_PAGES = 30 * NUM_PAIRS / PAGE_SIZE;
        final Random rnd = new Random(RANDOM_SEED);
        final IntPair[] pairs = getRandomizedIntPairs(NUM_PAIRS, rnd);
        List<MemorySegment> memory = getMemory(NUM_MEM_PAGES);
        CompactingHashTable<IntPair> table = new CompactingHashTable<>(intPairSerializer, intPairComparator, memory);
        table.open();
        for (int i = 0; i < NUM_PAIRS; i++) {
            table.insert(pairs[i]);
        }
        AbstractHashTableProber<IntPair, IntPair> prober = table.getProber(intPairComparator, new SameTypePairComparator<>(intPairComparator));
        IntPair target = new IntPair();
        for (int i = 0; i < NUM_PAIRS; i++) {
            assertNotNull(prober.getMatchFor(pairs[i], target));
            assertEquals(pairs[i].getValue(), target.getValue());
        }
        // make sure there is enough memory for resize
        memory.addAll(getMemory(ADDITIONAL_MEM));
        Boolean b = Whitebox.<Boolean>invokeMethod(table, "resizeHashTable");
        assertTrue(b);
        for (int i = 0; i < NUM_PAIRS; i++) {
            assertNotNull(pairs[i].getKey() + " " + pairs[i].getValue(), prober.getMatchFor(pairs[i], target));
            assertEquals(pairs[i].getValue(), target.getValue());
        }
        // make sure there is enough memory for resize
        memory.addAll(getMemory(ADDITIONAL_MEM));
        b = Whitebox.<Boolean>invokeMethod(table, "resizeHashTable");
        assertTrue(b);
        for (int i = 0; i < NUM_PAIRS; i++) {
            assertNotNull(pairs[i].getKey() + " " + pairs[i].getValue(), prober.getMatchFor(pairs[i], target));
            assertEquals(pairs[i].getValue(), target.getValue());
        }
        table.close();
        assertEquals("Memory lost", NUM_MEM_PAGES + ADDITIONAL_MEM + ADDITIONAL_MEM, table.getFreeMemory().size());
    } catch (Exception e) {
        e.printStackTrace();
        fail("Error: " + e.getMessage());
    }
}
Also used: Random (java.util.Random), IntPair (org.apache.flink.runtime.operators.testutils.types.IntPair), MemorySegment (org.apache.flink.core.memory.MemorySegment), Test (org.junit.Test)
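
The helper getRandomizedIntPairs is referenced but not shown. A minimal sketch of such a generator, assuming IntPair exposes an (int key, int value) constructor (an assumption here, not taken from the test source):

private static IntPair[] getRandomizedIntPairs(int num, Random rnd) {
    // A seeded Random makes the data reproducible, so a failing run can be replayed.
    IntPair[] pairs = new IntPair[num];
    for (int i = 0; i < num; i++) {
        // Hypothetical constructor; the real helper may also deduplicate keys.
        pairs[i] = new IntPair(rnd.nextInt(), rnd.nextInt());
    }
    return pairs;
}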

Example 52 with Random

Use of java.util.Random in project flink by apache.

From class CompactingHashTableTest, method testResize.

@Test
public void testResize() {
    // Only CompactingHashTable
    try {
        final int NUM_MEM_PAGES = 30 * NUM_PAIRS / PAGE_SIZE;
        final Random rnd = new Random(RANDOM_SEED);
        final IntPair[] pairs = getRandomizedIntPairs(NUM_PAIRS, rnd);
        List<MemorySegment> memory = getMemory(NUM_MEM_PAGES);
        CompactingHashTable<IntPair> table = new CompactingHashTable<>(intPairSerializer, intPairComparator, memory);
        table.open();
        for (int i = 0; i < NUM_PAIRS; i++) {
            table.insert(pairs[i]);
        }
        AbstractHashTableProber<IntPair, IntPair> prober = table.getProber(intPairComparator, new SameTypePairComparator<>(intPairComparator));
        IntPair target = new IntPair();
        for (int i = 0; i < NUM_PAIRS; i++) {
            assertNotNull(prober.getMatchFor(pairs[i], target));
            assertEquals(pairs[i].getValue(), target.getValue());
        }
        // make sure there is enough memory for resize
        memory.addAll(getMemory(ADDITIONAL_MEM));
        Boolean b = Whitebox.<Boolean>invokeMethod(table, "resizeHashTable");
        assertTrue(b);
        for (int i = 0; i < NUM_PAIRS; i++) {
            assertNotNull(pairs[i].getKey() + " " + pairs[i].getValue(), prober.getMatchFor(pairs[i], target));
            assertEquals(pairs[i].getValue(), target.getValue());
        }
        table.close();
        assertEquals("Memory lost", NUM_MEM_PAGES + ADDITIONAL_MEM, table.getFreeMemory().size());
    } catch (Exception e) {
        e.printStackTrace();
        fail("Error: " + e.getMessage());
    }
}
Also used: Random (java.util.Random), IntPair (org.apache.flink.runtime.operators.testutils.types.IntPair), MemorySegment (org.apache.flink.core.memory.MemorySegment), Test (org.junit.Test)
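
Both tests reach the private resizeHashTable() method through Whitebox, a reflection helper from the PowerMock test library. For readers unfamiliar with it, a plain java.lang.reflect equivalent looks roughly like this (a sketch, not what the tests actually use):

import java.lang.reflect.Method;

// Invoke a private no-arg method that returns boolean, in the spirit of
// Whitebox.<Boolean>invokeMethod(table, "resizeHashTable").
static boolean invokePrivateBoolean(Object target, String methodName) throws Exception {
    Method m = target.getClass().getDeclaredMethod(methodName);
    // Private methods are inaccessible by default; lift that for the test.
    m.setAccessible(true);
    return (Boolean) m.invoke(target);
}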

Example 53 with Random

Use of java.util.Random in project hadoop by apache.

From class MetricsSinkAdapter, method publishMetricsFromQueue.

void publishMetricsFromQueue() {
    int retryDelay = firstRetryDelay;
    int n = retryCount;
    int minDelay = Math.min(500, retryDelay * 1000); // millis
    Random rng = new Random(System.nanoTime());
    while (!stopping) {
        try {
            queue.consumeAll(this);
            refreshQueueSizeGauge();
            retryDelay = firstRetryDelay;
            n = retryCount;
            inError = false;
        } catch (InterruptedException e) {
            LOG.info(name + " thread interrupted.");
        } catch (Exception e) {
            if (n > 0) {
                int retryWindow = Math.max(0, 1000 / 2 * retryDelay - minDelay);
                int awhile = rng.nextInt(retryWindow) + minDelay;
                if (!inError) {
                    LOG.error("Got sink exception, retry in " + awhile + "ms", e);
                }
                retryDelay *= retryBackoff;
                try {
                    Thread.sleep(awhile);
                } catch (InterruptedException e2) {
                    LOG.info(name + " thread interrupted while waiting for retry", e2);
                }
                --n;
            } else {
                if (!inError) {
                    LOG.error("Got sink exception and over retry limit, " + "suppressing further error messages", e);
                }
                queue.clear();
                refreshQueueSizeGauge();
                // Don't keep complaining ad infinitum
                inError = true;
            }
        }
    }
}
Also used: Random (java.util.Random)
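
The retry policy here is a jittered exponential backoff: each failed publish sleeps a random time whose window grows by retryBackoff, which avoids hammering a struggling sink at a fixed cadence. A self-contained sketch of just that computation (firstRetryDelay and retryBackoff are configuration fields of MetricsSinkAdapter; the values below are placeholders, not Hadoop's defaults):

import java.util.Random;

public final class BackoffSketch {
    public static void main(String[] args) {
        int firstRetryDelay = 10; // seconds; placeholder value
        int retryBackoff = 2;     // growth factor; placeholder value
        int retryDelay = firstRetryDelay;
        // Lower bound on the sleep, in millis.
        int minDelay = Math.min(500, retryDelay * 1000);
        Random rng = new Random(System.nanoTime());
        for (int attempt = 1; attempt <= 3; attempt++) {
            // Sleep is uniform over [minDelay, retryDelay * 500) millis.
            // Guard with max(1, ...): Random.nextInt requires a positive bound.
            int retryWindow = Math.max(1, 1000 / 2 * retryDelay - minDelay);
            int awhile = rng.nextInt(retryWindow) + minDelay;
            System.out.println("attempt " + attempt + ": sleep " + awhile + " ms");
            retryDelay *= retryBackoff;
        }
    }
}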

Example 54 with Random

Use of java.util.Random in project flink by apache.

From class SavepointV1SerializerTest, method testSerializeDeserializeV1.

/**
 * Tests serialization of a {@link SavepointV1} instance.
 */
@Test
public void testSerializeDeserializeV1() throws Exception {
    Random r = new Random(42);
    for (int i = 0; i < 100; ++i) {
        SavepointV1 expected = new SavepointV1(i + 123123, SavepointV1Test.createTaskStates(1 + r.nextInt(64), 1 + r.nextInt(64)));
        SavepointV1Serializer serializer = SavepointV1Serializer.INSTANCE;
        // Serialize
        ByteArrayOutputStreamWithPos baos = new ByteArrayOutputStreamWithPos();
        serializer.serialize(expected, new DataOutputViewStreamWrapper(baos));
        byte[] bytes = baos.toByteArray();
        // Deserialize
        ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
        Savepoint actual = serializer.deserialize(new DataInputViewStreamWrapper(bais), Thread.currentThread().getContextClassLoader());
        assertEquals(expected, actual);
    }
}
Also used: Random (java.util.Random), DataOutputViewStreamWrapper (org.apache.flink.core.memory.DataOutputViewStreamWrapper), ByteArrayInputStream (java.io.ByteArrayInputStream), ByteArrayOutputStreamWithPos (org.apache.flink.core.memory.ByteArrayOutputStreamWithPos), DataInputViewStreamWrapper (org.apache.flink.core.memory.DataInputViewStreamWrapper), Test (org.junit.Test)
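
The test is a classic round trip: serialize into a byte array, deserialize from it, and compare with the original, driven a hundred times with Random-generated inputs. The same pattern reduced to a generic helper over plain java.io serialization (a sketch of the pattern, not Flink's serializer API):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

// Write an object out, read it back, and return it so the caller can
// assertEquals(expected, roundTrip(expected)).
static <T extends Serializable> T roundTrip(T expected) throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (ObjectOutputStream out = new ObjectOutputStream(baos)) {
        out.writeObject(expected);
    }
    try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
        @SuppressWarnings("unchecked")
        T actual = (T) in.readObject();
        return actual;
    }
}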

Example 55 with Random

Use of java.util.Random in project flink by apache.

From class SavepointV1Test, method createTaskStates.

static Collection<TaskState> createTaskStates(int numTaskStates, int numSubtasksPerTask) throws IOException {
    Random random = new Random(numTaskStates * 31 + numSubtasksPerTask);
    List<TaskState> taskStates = new ArrayList<>(numTaskStates);
    for (int stateIdx = 0; stateIdx < numTaskStates; ++stateIdx) {
        int chainLength = 1 + random.nextInt(8);
        TaskState taskState = new TaskState(new JobVertexID(), numSubtasksPerTask, 128, chainLength);
        int noNonPartitionableStateAtIndex = random.nextInt(chainLength);
        int noOperatorStateBackendAtIndex = random.nextInt(chainLength);
        int noOperatorStateStreamAtIndex = random.nextInt(chainLength);
        boolean hasKeyedBackend = random.nextInt(4) != 0;
        boolean hasKeyedStream = random.nextInt(4) != 0;
        for (int subtaskIdx = 0; subtaskIdx < numSubtasksPerTask; subtaskIdx++) {
            List<StreamStateHandle> nonPartitionableStates = new ArrayList<>(chainLength);
            List<OperatorStateHandle> operatorStatesBackend = new ArrayList<>(chainLength);
            List<OperatorStateHandle> operatorStatesStream = new ArrayList<>(chainLength);
            for (int chainIdx = 0; chainIdx < chainLength; ++chainIdx) {
                StreamStateHandle nonPartitionableState = new TestByteStreamStateHandleDeepCompare("a-" + chainIdx, ("Hi-" + chainIdx).getBytes(ConfigConstants.DEFAULT_CHARSET));
                StreamStateHandle operatorStateBackend = new TestByteStreamStateHandleDeepCompare("b-" + chainIdx, ("Beautiful-" + chainIdx).getBytes(ConfigConstants.DEFAULT_CHARSET));
                StreamStateHandle operatorStateStream = new TestByteStreamStateHandleDeepCompare("b-" + chainIdx, ("Beautiful-" + chainIdx).getBytes(ConfigConstants.DEFAULT_CHARSET));
                Map<String, OperatorStateHandle.StateMetaInfo> offsetsMap = new HashMap<>();
                offsetsMap.put("A", new OperatorStateHandle.StateMetaInfo(new long[] { 0, 10, 20 }, OperatorStateHandle.Mode.SPLIT_DISTRIBUTE));
                offsetsMap.put("B", new OperatorStateHandle.StateMetaInfo(new long[] { 30, 40, 50 }, OperatorStateHandle.Mode.SPLIT_DISTRIBUTE));
                offsetsMap.put("C", new OperatorStateHandle.StateMetaInfo(new long[] { 60, 70, 80 }, OperatorStateHandle.Mode.BROADCAST));
                if (chainIdx != noNonPartitionableStateAtIndex) {
                    nonPartitionableStates.add(nonPartitionableState);
                }
                if (chainIdx != noOperatorStateBackendAtIndex) {
                    OperatorStateHandle operatorStateHandleBackend = new OperatorStateHandle(offsetsMap, operatorStateBackend);
                    operatorStatesBackend.add(operatorStateHandleBackend);
                }
                if (chainIdx != noOperatorStateStreamAtIndex) {
                    OperatorStateHandle operatorStateHandleStream = new OperatorStateHandle(offsetsMap, operatorStateStream);
                    operatorStatesStream.add(operatorStateHandleStream);
                }
            }
            KeyGroupsStateHandle keyedStateBackend = null;
            KeyGroupsStateHandle keyedStateStream = null;
            if (hasKeyedBackend) {
                keyedStateBackend = new KeyGroupsStateHandle(new KeyGroupRangeOffsets(1, 1, new long[] { 42 }), new TestByteStreamStateHandleDeepCompare("c", "Hello".getBytes(ConfigConstants.DEFAULT_CHARSET)));
            }
            if (hasKeyedStream) {
                keyedStateStream = new KeyGroupsStateHandle(new KeyGroupRangeOffsets(1, 1, new long[] { 23 }), new TestByteStreamStateHandleDeepCompare("d", "World".getBytes(ConfigConstants.DEFAULT_CHARSET)));
            }
            taskState.putState(subtaskIdx, new SubtaskState(new ChainedStateHandle<>(nonPartitionableStates), new ChainedStateHandle<>(operatorStatesBackend), new ChainedStateHandle<>(operatorStatesStream), keyedStateStream, keyedStateBackend));
        }
        taskStates.add(taskState);
    }
    return taskStates;
}
Also used: HashMap (java.util.HashMap), KeyGroupRangeOffsets (org.apache.flink.runtime.state.KeyGroupRangeOffsets), JobVertexID (org.apache.flink.runtime.jobgraph.JobVertexID), TestByteStreamStateHandleDeepCompare (org.apache.flink.runtime.util.TestByteStreamStateHandleDeepCompare), ArrayList (java.util.ArrayList), KeyGroupsStateHandle (org.apache.flink.runtime.state.KeyGroupsStateHandle), ChainedStateHandle (org.apache.flink.runtime.state.ChainedStateHandle), StreamStateHandle (org.apache.flink.runtime.state.StreamStateHandle), Random (java.util.Random), ThreadLocalRandom (java.util.concurrent.ThreadLocalRandom), SubtaskState (org.apache.flink.runtime.checkpoint.SubtaskState), OperatorStateHandle (org.apache.flink.runtime.state.OperatorStateHandle), TaskState (org.apache.flink.runtime.checkpoint.TaskState)
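
Worth noting: the seed is derived from the method's parameters (numTaskStates * 31 + numSubtasksPerTask) rather than being a constant, so every parameter combination yields a distinct but fully reproducible fixture. A minimal demonstration of that property (the 31L widening to long is an added safeguard against int overflow, not part of the original):

import java.util.Random;

public final class SeedDemo {
    // Same derivation as createTaskStates above, widened to long.
    static long seedFor(int numTaskStates, int numSubtasksPerTask) {
        return numTaskStates * 31L + numSubtasksPerTask;
    }

    public static void main(String[] args) {
        Random a = new Random(seedFor(3, 4));
        Random b = new Random(seedFor(3, 4));
        // Equal seeds produce identical sequences, so the generated state is stable.
        System.out.println(a.nextInt(8) == b.nextInt(8)); // prints true
    }
}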

Aggregations

Random (java.util.Random): 4728
Test (org.junit.Test): 1273
ArrayList (java.util.ArrayList): 602
IOException (java.io.IOException): 313
HashMap (java.util.HashMap): 242
File (java.io.File): 209
List (java.util.List): 154
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 151
ByteArrayInputStream (java.io.ByteArrayInputStream): 134
HashSet (java.util.HashSet): 129
ByteBuffer (java.nio.ByteBuffer): 123
Test (org.testng.annotations.Test): 121
Path (org.apache.hadoop.fs.Path): 116
Map (java.util.Map): 106
QuickTest (com.hazelcast.test.annotation.QuickTest): 99
ParallelTest (com.hazelcast.test.annotation.ParallelTest): 94
CountDownLatch (java.util.concurrent.CountDownLatch): 93
Configuration (org.apache.hadoop.conf.Configuration): 88
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 79
Before (org.junit.Before): 78