
Example 81 with InvocationOnMock

Use of org.mockito.invocation.InvocationOnMock in project pulsar by yahoo.

From the class PersistentTopicTest, method testCreateTopicMLFailure:

@Test
public void testCreateTopicMLFailure() throws Exception {
    final String jinxedTopicName = "persistent://prop/use/ns-abc/topic3";
    doAnswer(new Answer<Object>() {

        @Override
        public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
            new Thread(() -> {
                ((OpenLedgerCallback) invocationOnMock.getArguments()[2]).openLedgerFailed(new ManagedLedgerException("Managed ledger failure"), null);
            }).start();
            return null;
        }
    }).when(mlFactoryMock).asyncOpen(anyString(), any(ManagedLedgerConfig.class), any(OpenLedgerCallback.class), anyObject());
    CompletableFuture<Topic> future = brokerService.getTopic(jinxedTopicName);
    // wait for completion
    try {
        future.get(1, TimeUnit.SECONDS);
        fail("should have failed");
    } catch (TimeoutException e) {
        fail("Should not time out");
    } catch (Exception e) {
        // OK: the stubbed managed ledger failure is expected to surface here
    }
}
Also used: Matchers.anyString (org.mockito.Matchers.anyString), TimeoutException (java.util.concurrent.TimeoutException), ManagedLedgerException (org.apache.bookkeeper.mledger.ManagedLedgerException), ExecutionException (java.util.concurrent.ExecutionException), InvocationOnMock (org.mockito.invocation.InvocationOnMock), Matchers.anyObject (org.mockito.Matchers.anyObject), ManagedLedgerConfig (org.apache.bookkeeper.mledger.ManagedLedgerConfig), PersistentTopic (com.yahoo.pulsar.broker.service.persistent.PersistentTopic), OpenLedgerCallback (org.apache.bookkeeper.mledger.AsyncCallbacks.OpenLedgerCallback), Test (org.testng.annotations.Test)
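
The core pattern above is worth isolating: stub an asynchronous open method so that it fails its callback from a separate thread, which forces brokerService.getTopic to complete exceptionally. Below is a minimal, self-contained sketch of that pattern; the AsyncService and OpenCallback types are hypothetical stand-ins for ManagedLedgerFactory.asyncOpen and OpenLedgerCallback, not Pulsar APIs.

import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;

import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

public class AsyncFailureStubSketch {

    // Hypothetical stand-ins for OpenLedgerCallback / ManagedLedgerFactory.
    interface OpenCallback {
        void openFailed(Exception cause);
    }

    interface AsyncService {
        void asyncOpen(String name, OpenCallback callback);
    }

    public static void main(String[] args) throws InterruptedException {
        AsyncService service = mock(AsyncService.class);

        // Fail the callback from another thread, as the Pulsar test does,
        // so the caller observes a genuinely asynchronous failure.
        doAnswer(new Answer<Object>() {
            @Override
            public Object answer(InvocationOnMock invocation) {
                final OpenCallback cb = (OpenCallback) invocation.getArguments()[1];
                new Thread(() -> cb.openFailed(new RuntimeException("open failed"))).start();
                return null;
            }
        }).when(service).asyncOpen(anyString(), any(OpenCallback.class));

        // Adapt the callback to a future, mirroring brokerService.getTopic().
        final CompletableFuture<Void> result = new CompletableFuture<>();
        service.asyncOpen("topic", result::completeExceptionally);

        try {
            result.get();
        } catch (ExecutionException expected) {
            System.out.println("failed as expected: " + expected.getCause().getMessage());
        }
    }
}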

Example 82 with InvocationOnMock

Use of org.mockito.invocation.InvocationOnMock in project spring-boot by spring-projects.

From the class SettingsXmlRepositorySystemSessionAutoConfigurationTests, method propertyInterpolation:

@Test
public void propertyInterpolation() throws SettingsBuildingException {
    final DefaultRepositorySystemSession session = MavenRepositorySystemUtils.newSession();
    given(this.repositorySystem.newLocalRepositoryManager(eq(session), any(LocalRepository.class))).willAnswer(new Answer<LocalRepositoryManager>() {

        @Override
        public LocalRepositoryManager answer(InvocationOnMock invocation) throws Throwable {
            LocalRepository localRepository = invocation.getArgument(1);
            return new SimpleLocalRepositoryManagerFactory().newInstance(session, localRepository);
        }
    });
    SystemProperties.doWithSystemProperties(new Runnable() {

        @Override
        public void run() {
            new SettingsXmlRepositorySystemSessionAutoConfiguration().apply(session, SettingsXmlRepositorySystemSessionAutoConfigurationTests.this.repositorySystem);
        }
    }, "user.home:src/test/resources/maven-settings/property-interpolation", "foo:bar");
    assertThat(session.getLocalRepository().getBasedir().getAbsolutePath()).endsWith(File.separatorChar + "bar" + File.separatorChar + "repository");
}
Also used: DefaultRepositorySystemSession (org.eclipse.aether.DefaultRepositorySystemSession), InvocationOnMock (org.mockito.invocation.InvocationOnMock), LocalRepositoryManager (org.eclipse.aether.repository.LocalRepositoryManager), LocalRepository (org.eclipse.aether.repository.LocalRepository), SimpleLocalRepositoryManagerFactory (org.eclipse.aether.internal.impl.SimpleLocalRepositoryManagerFactory), Test (org.junit.Test)
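
Note the accessor used here: invocation.getArgument(1), available from Mockito 2.x, returns the argument already cast to the target type, whereas the older style seen in the Pulsar example, getArguments()[i], returns Object and needs an explicit cast. Below is a minimal sketch of the given(...).willAnswer(...) BDD style with a typed argument lookup, using a plain BiFunction mock purely for illustration (assuming Mockito 2.x for ArgumentMatchers and getArgument).

import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;

import java.util.function.BiFunction;

import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

public class TypedArgumentSketch {

    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        BiFunction<String, Integer, String> repeat = mock(BiFunction.class);

        // willAnswer lets the stub compute its return value from the
        // actual arguments of the call, rather than returning a constant.
        given(repeat.apply(anyString(), anyInt())).willAnswer(new Answer<String>() {
            @Override
            public String answer(InvocationOnMock invocation) {
                String s = invocation.getArgument(0); // typed, no cast needed
                int n = invocation.getArgument(1);    // auto-unboxed
                StringBuilder out = new StringBuilder();
                for (int i = 0; i < n; i++) {
                    out.append(s);
                }
                return out.toString();
            }
        });

        System.out.println(repeat.apply("ab", 3)); // prints ababab
    }
}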

Example 83 with InvocationOnMock

Use of org.mockito.invocation.InvocationOnMock in project flink by apache.

From the class CassandraTupleWriteAheadSinkTest, method testAckLoopExitOnException:

@Test(timeout = 20000)
public void testAckLoopExitOnException() throws Exception {
    final AtomicReference<Runnable> runnableFuture = new AtomicReference<>();
    final ClusterBuilder clusterBuilder = new ClusterBuilder() {

        private static final long serialVersionUID = 4624400760492936756L;

        @Override
        protected Cluster buildCluster(Cluster.Builder builder) {
            try {
                BoundStatement boundStatement = mock(BoundStatement.class);
                when(boundStatement.setDefaultTimestamp(any(long.class))).thenReturn(boundStatement);
                PreparedStatement preparedStatement = mock(PreparedStatement.class);
                when(preparedStatement.bind(Matchers.anyVararg())).thenReturn(boundStatement);
                ResultSetFuture future = mock(ResultSetFuture.class);
                when(future.get()).thenThrow(new RuntimeException("Expected exception."));
                doAnswer(new Answer<Void>() {

                    @Override
                    public Void answer(InvocationOnMock invocationOnMock) throws Throwable {
                        synchronized (runnableFuture) {
                            runnableFuture.set((Runnable) invocationOnMock.getArguments()[0]);
                            runnableFuture.notifyAll();
                        }
                        return null;
                    }
                }).when(future).addListener(any(Runnable.class), any(Executor.class));
                Session session = mock(Session.class);
                when(session.prepare(anyString())).thenReturn(preparedStatement);
                when(session.executeAsync(any(BoundStatement.class))).thenReturn(future);
                Cluster cluster = mock(Cluster.class);
                when(cluster.connect()).thenReturn(session);
                return cluster;
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    };
    // Our asynchronous executor thread
    new Thread(new Runnable() {

        @Override
        public void run() {
            synchronized (runnableFuture) {
                while (runnableFuture.get() == null) {
                    try {
                        runnableFuture.wait();
                    } catch (InterruptedException e) {
                        // ignore interrupts
                    }
                }
            }
            runnableFuture.get().run();
        }
    }).start();
    CheckpointCommitter cc = mock(CheckpointCommitter.class);
    final CassandraTupleWriteAheadSink<Tuple0> sink = new CassandraTupleWriteAheadSink<>("abc", TupleTypeInfo.of(Tuple0.class).createSerializer(new ExecutionConfig()), clusterBuilder, cc);
    OneInputStreamOperatorTestHarness<Tuple0, Tuple0> harness = new OneInputStreamOperatorTestHarness<>(sink);
    harness.getEnvironment().getTaskConfiguration().setBoolean("checkpointing", true);
    harness.setup();
    sink.open();
    // we should leave the loop and return false since we've seen an exception
    assertFalse(sink.sendValues(Collections.singleton(new Tuple0()), 0L));
    sink.close();
}
Also used: ResultSetFuture (com.datastax.driver.core.ResultSetFuture), ExecutionConfig (org.apache.flink.api.common.ExecutionConfig), CheckpointCommitter (org.apache.flink.streaming.runtime.operators.CheckpointCommitter), Executor (java.util.concurrent.Executor), Cluster (com.datastax.driver.core.Cluster), AtomicReference (java.util.concurrent.atomic.AtomicReference), PreparedStatement (com.datastax.driver.core.PreparedStatement), OneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness), Tuple0 (org.apache.flink.api.java.tuple.Tuple0), InvocationOnMock (org.mockito.invocation.InvocationOnMock), BoundStatement (com.datastax.driver.core.BoundStatement), Session (com.datastax.driver.core.Session), Test (org.junit.Test)
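
The interesting move in this test is capturing the Runnable that production code registers via addListener, publishing it through a synchronized AtomicReference, and replaying it on a separate "executor" thread. Here is a minimal sketch of that capture-and-replay handoff; ListenableTask is a hypothetical stand-in for the mocked ResultSetFuture.

import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

import java.util.concurrent.atomic.AtomicReference;

import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

public class CaptureListenerSketch {

    // Hypothetical stand-in for ResultSetFuture.addListener(Runnable, Executor).
    interface ListenableTask {
        void addListener(Runnable listener);
    }

    public static void main(String[] args) {
        ListenableTask task = mock(ListenableTask.class);
        final AtomicReference<Runnable> captured = new AtomicReference<>();

        // Capture the registered listener and wake up the waiting thread,
        // the same handoff the Flink test performs.
        doAnswer(new Answer<Void>() {
            @Override
            public Void answer(InvocationOnMock invocation) {
                synchronized (captured) {
                    captured.set((Runnable) invocation.getArguments()[0]);
                    captured.notifyAll();
                }
                return null;
            }
        }).when(task).addListener(any(Runnable.class));

        // The "asynchronous executor" thread: waits for a listener, then runs it.
        new Thread(() -> {
            synchronized (captured) {
                while (captured.get() == null) {
                    try {
                        captured.wait();
                    } catch (InterruptedException ignored) {
                        // keep waiting, as the original test does
                    }
                }
            }
            captured.get().run();
        }).start();

        // Production code would register its completion callback like this:
        task.addListener(() -> System.out.println("listener ran asynchronously"));
    }
}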

Example 84 with InvocationOnMock

Use of org.mockito.invocation.InvocationOnMock in project flink by apache.

From the class Kafka010FetcherTest, method testCommitDoesNotBlock:

@Test
public void testCommitDoesNotBlock() throws Exception {
    // test data
    final KafkaTopicPartition testPartition = new KafkaTopicPartition("test", 42);
    final Map<KafkaTopicPartition, Long> testCommitData = new HashMap<>();
    testCommitData.put(testPartition, 11L);
    // to synchronize when the consumer is in its blocking method
    final OneShotLatch sync = new OneShotLatch();
    // ----- the mock consumer with blocking poll calls ----
    final MultiShotLatch blockerLatch = new MultiShotLatch();
    KafkaConsumer<?, ?> mockConsumer = mock(KafkaConsumer.class);
    when(mockConsumer.poll(anyLong())).thenAnswer(new Answer<ConsumerRecords<?, ?>>() {

        @Override
        public ConsumerRecords<?, ?> answer(InvocationOnMock invocation) throws InterruptedException {
            sync.trigger();
            blockerLatch.await();
            return ConsumerRecords.empty();
        }
    });
    doAnswer(new Answer<Void>() {

        @Override
        public Void answer(InvocationOnMock invocation) {
            blockerLatch.trigger();
            return null;
        }
    }).when(mockConsumer).wakeup();
    // make sure the fetcher creates the mock consumer
    whenNew(KafkaConsumer.class).withAnyArguments().thenReturn(mockConsumer);
    // ----- create the test fetcher -----
    @SuppressWarnings("unchecked") SourceContext<String> sourceContext = mock(SourceContext.class);
    Map<KafkaTopicPartition, Long> partitionsWithInitialOffsets = Collections.singletonMap(new KafkaTopicPartition("test", 42), KafkaTopicPartitionStateSentinel.GROUP_OFFSET);
    KeyedDeserializationSchema<String> schema = new KeyedDeserializationSchemaWrapper<>(new SimpleStringSchema());
    final Kafka010Fetcher<String> fetcher = new Kafka010Fetcher<>(
            sourceContext,
            partitionsWithInitialOffsets,
            null, /* periodic assigner */
            null, /* punctuated assigner */
            new TestProcessingTimeService(),
            10,
            getClass().getClassLoader(),
            "taskname-with-subtask",
            new UnregisteredMetricsGroup(),
            schema,
            new Properties(),
            0L,
            false);
    // ----- run the fetcher -----
    final AtomicReference<Throwable> error = new AtomicReference<>();
    final Thread fetcherRunner = new Thread("fetcher runner") {

        @Override
        public void run() {
            try {
                fetcher.runFetchLoop();
            } catch (Throwable t) {
                error.set(t);
            }
        }
    };
    fetcherRunner.start();
    // wait until the fetcher has reached the method of interest
    sync.await();
    // ----- trigger the offset commit -----
    final AtomicReference<Throwable> commitError = new AtomicReference<>();
    final Thread committer = new Thread("committer runner") {

        @Override
        public void run() {
            try {
                fetcher.commitInternalOffsetsToKafka(testCommitData);
            } catch (Throwable t) {
                commitError.set(t);
            }
        }
    };
    committer.start();
    // ----- ensure that the committer finishes in time  -----
    committer.join(30000);
    assertFalse("The committer did not finish in time", committer.isAlive());
    // ----- test done, wait till the fetcher is done for a clean shutdown -----
    fetcher.cancel();
    fetcherRunner.join();
    // check that there were no errors in the fetcher
    final Throwable fetcherError = error.get();
    if (fetcherError != null && !(fetcherError instanceof Handover.ClosedException)) {
        throw new Exception("Exception in the fetcher", fetcherError);
    }
    final Throwable committerError = commitError.get();
    if (committerError != null) {
        throw new Exception("Exception in the committer", committerError);
    }
}
Also used: UnregisteredMetricsGroup (org.apache.flink.metrics.groups.UnregisteredMetricsGroup), HashMap (java.util.HashMap), MultiShotLatch (org.apache.flink.core.testutils.MultiShotLatch), KafkaTopicPartition (org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition), Properties (java.util.Properties), ConsumerRecords (org.apache.kafka.clients.consumer.ConsumerRecords), KeyedDeserializationSchemaWrapper (org.apache.flink.streaming.util.serialization.KeyedDeserializationSchemaWrapper), Handover (org.apache.flink.streaming.connectors.kafka.internal.Handover), OneShotLatch (org.apache.flink.core.testutils.OneShotLatch), AtomicReference (java.util.concurrent.atomic.AtomicReference), KafkaConsumerThread (org.apache.flink.streaming.connectors.kafka.internal.KafkaConsumerThread), InvocationOnMock (org.mockito.invocation.InvocationOnMock), Mockito.anyLong (org.mockito.Mockito.anyLong), SimpleStringSchema (org.apache.flink.streaming.util.serialization.SimpleStringSchema), TestProcessingTimeService (org.apache.flink.streaming.runtime.tasks.TestProcessingTimeService), Kafka010Fetcher (org.apache.flink.streaming.connectors.kafka.internal.Kafka010Fetcher), PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest), Test (org.junit.Test)
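
The whenNew call in this test is PowerMock's constructor interception (PowerMockito.whenNew), not plain Mockito: it makes `new KafkaConsumer(...)` inside the fetcher return the mock, which is why the test class carries @PrepareForTest. A minimal sketch of the mechanism follows, with a hypothetical Widget/WidgetFactory pair standing in for KafkaConsumer and the fetcher code.

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.powermock.api.mockito.PowerMockito.whenNew;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

// Hypothetical classes for illustration: Widget stands in for KafkaConsumer,
// WidgetFactory for the production code that calls `new`.
class Widget {
    String name() {
        return "real";
    }
}

class WidgetFactory {
    Widget create() {
        return new Widget(); // the constructor call PowerMock intercepts
    }
}

@RunWith(PowerMockRunner.class)
@PrepareForTest(WidgetFactory.class) // prepare the class that calls `new Widget()`
public class WhenNewSketchTest {

    @Test
    public void constructorReturnsTheMock() throws Exception {
        Widget mockWidget = mock(Widget.class);
        when(mockWidget.name()).thenReturn("mocked");

        // Any `new Widget(...)` inside classes listed in @PrepareForTest
        // now yields the mock instead of a real instance.
        whenNew(Widget.class).withAnyArguments().thenReturn(mockWidget);

        assertEquals("mocked", new WidgetFactory().create().name());
    }
}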

Example 85 with InvocationOnMock

Use of org.mockito.invocation.InvocationOnMock in project flink by apache.

From the class Kafka010FetcherTest, method testCancellationWhenEmitBlocks:

@Test
public void testCancellationWhenEmitBlocks() throws Exception {
    // ----- some test data -----
    final String topic = "test-topic";
    final int partition = 3;
    final byte[] payload = new byte[] { 1, 2, 3, 4 };
    final List<ConsumerRecord<byte[], byte[]>> records = Arrays.asList(
            new ConsumerRecord<byte[], byte[]>(topic, partition, 15, payload, payload),
            new ConsumerRecord<byte[], byte[]>(topic, partition, 16, payload, payload),
            new ConsumerRecord<byte[], byte[]>(topic, partition, 17, payload, payload));
    final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> data = new HashMap<>();
    data.put(new TopicPartition(topic, partition), records);
    final ConsumerRecords<byte[], byte[]> consumerRecords = new ConsumerRecords<>(data);
    // ----- the test consumer -----
    final KafkaConsumer<?, ?> mockConsumer = mock(KafkaConsumer.class);
    when(mockConsumer.poll(anyLong())).thenAnswer(new Answer<ConsumerRecords<?, ?>>() {

        @Override
        public ConsumerRecords<?, ?> answer(InvocationOnMock invocation) {
            return consumerRecords;
        }
    });
    whenNew(KafkaConsumer.class).withAnyArguments().thenReturn(mockConsumer);
    // ----- build a fetcher -----
    BlockingSourceContext<String> sourceContext = new BlockingSourceContext<>();
    Map<KafkaTopicPartition, Long> partitionsWithInitialOffsets = Collections.singletonMap(new KafkaTopicPartition(topic, partition), KafkaTopicPartitionStateSentinel.GROUP_OFFSET);
    KeyedDeserializationSchema<String> schema = new KeyedDeserializationSchemaWrapper<>(new SimpleStringSchema());
    final Kafka010Fetcher<String> fetcher = new Kafka010Fetcher<>(
            sourceContext,
            partitionsWithInitialOffsets,
            null, /* periodic watermark extractor */
            null, /* punctuated watermark extractor */
            new TestProcessingTimeService(),
            10, /* watermark interval */
            this.getClass().getClassLoader(),
            "task_name",
            new UnregisteredMetricsGroup(),
            schema,
            new Properties(),
            0L,
            false);
    // ----- run the fetcher -----
    final AtomicReference<Throwable> error = new AtomicReference<>();
    final Thread fetcherRunner = new Thread("fetcher runner") {

        @Override
        public void run() {
            try {
                fetcher.runFetchLoop();
            } catch (Throwable t) {
                error.set(t);
            }
        }
    };
    fetcherRunner.start();
    // wait until the thread started to emit records to the source context
    sourceContext.waitTillHasBlocker();
    // now we try to cancel the fetcher, including the interruption usually done on the task thread
    // once it has finished, there must be no more thread blocked on the source context
    fetcher.cancel();
    fetcherRunner.interrupt();
    fetcherRunner.join();
    assertFalse("fetcher threads did not properly finish", sourceContext.isStillBlocking());
}
Also used: UnregisteredMetricsGroup (org.apache.flink.metrics.groups.UnregisteredMetricsGroup), HashMap (java.util.HashMap), KafkaTopicPartition (org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition), Properties (java.util.Properties), ConsumerRecords (org.apache.kafka.clients.consumer.ConsumerRecords), KeyedDeserializationSchemaWrapper (org.apache.flink.streaming.util.serialization.KeyedDeserializationSchemaWrapper), List (java.util.List), AtomicReference (java.util.concurrent.atomic.AtomicReference), ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord), KafkaConsumerThread (org.apache.flink.streaming.connectors.kafka.internal.KafkaConsumerThread), TopicPartition (org.apache.kafka.common.TopicPartition), InvocationOnMock (org.mockito.invocation.InvocationOnMock), Mockito.anyLong (org.mockito.Mockito.anyLong), SimpleStringSchema (org.apache.flink.streaming.util.serialization.SimpleStringSchema), TestProcessingTimeService (org.apache.flink.streaming.runtime.tasks.TestProcessingTimeService), Kafka010Fetcher (org.apache.flink.streaming.connectors.kafka.internal.Kafka010Fetcher), PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest), Test (org.junit.Test)
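
The shutdown choreography at the end of this test (cancel, interrupt, join, then assert nothing is still blocked) is the reusable part. Below is a stripped-down sketch of that shape, with a plain monitor wait standing in for the blocking emit call; all names here are illustrative, not Flink APIs.

import java.util.concurrent.CountDownLatch;

public class InterruptUnblocksSketch {

    public static void main(String[] args) throws InterruptedException {
        final CountDownLatch entered = new CountDownLatch(1);
        final Object blocker = new Object();

        Thread worker = new Thread(() -> {
            synchronized (blocker) {
                entered.countDown(); // signal: about to block
                try {
                    blocker.wait(); // stands in for a blocking emit/collect call
                } catch (InterruptedException e) {
                    // cancellation path: restore the flag and exit cleanly
                    Thread.currentThread().interrupt();
                }
            }
        });
        worker.start();

        entered.await();    // wait until the worker is inside the blocking call
        worker.interrupt(); // cancel, as fetcherRunner.interrupt() does above
        worker.join(5000);

        if (worker.isAlive()) {
            throw new AssertionError("worker did not unblock after interrupt");
        }
        System.out.println("worker unblocked cleanly");
    }
}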

Aggregations

InvocationOnMock (org.mockito.invocation.InvocationOnMock): 1088
Test (org.junit.Test): 655
Answer (org.mockito.stubbing.Answer): 287
Matchers.anyString (org.mockito.Matchers.anyString): 145
HashMap (java.util.HashMap): 124
PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest): 113
Before (org.junit.Before): 110
Mockito.doAnswer (org.mockito.Mockito.doAnswer): 110
ArrayList (java.util.ArrayList): 109
IOException (java.io.IOException): 92
Context (android.content.Context): 68
List (java.util.List): 62
AtomicReference (java.util.concurrent.atomic.AtomicReference): 61
CountDownLatch (java.util.concurrent.CountDownLatch): 59
Test (org.testng.annotations.Test): 59
File (java.io.File): 55
UUID (java.util.UUID): 46
Configuration (org.apache.hadoop.conf.Configuration): 46
Activity (android.app.Activity): 40
Semaphore (java.util.concurrent.Semaphore): 38