Use of io.pravega.shared.protocol.netty.PravegaNodeUri in project pravega by pravega.
The class LargeEventWriterTest, method testBufferSplitting: verifies that buffers totalling more than Serializer.MAX_EVENT_SIZE are written as appends of at most MAX_EVENT_SIZE bytes, with the remainder in a final smaller block.
@Test(timeout = 5000)
public void testBufferSplitting() throws NoSuchSegmentException, AuthenticationException, SegmentSealedException, ConnectionFailedException {
Segment segment = Segment.fromScopedName("foo/bar/1");
MockConnectionFactoryImpl connectionFactory = new MockConnectionFactoryImpl();
MockController controller = new MockController("localhost", 0, connectionFactory, false);
ClientConnection connection = Mockito.mock(ClientConnection.class);
PravegaNodeUri location = new PravegaNodeUri("localhost", 0);
connectionFactory.provideConnection(location, connection);
ArrayList<ByteBuf> written = new ArrayList<>();
answerRequest(connectionFactory, connection, location, CreateTransientSegment.class, r -> new SegmentCreated(r.getRequestId(), "transient-segment"));
answerRequest(connectionFactory, connection, location, SetupAppend.class, r -> new AppendSetup(r.getRequestId(), segment.getScopedName(), r.getWriterId(), WireCommands.NULL_ATTRIBUTE_VALUE));
// Capture every appended block so the split sizes can be asserted below.
answerRequest(connectionFactory, connection, location, ConditionalBlockEnd.class, r -> {
    ByteBuf data = r.getData();
    written.add(data);
    return new DataAppended(r.getRequestId(), r.getWriterId(), r.getEventNumber(), r.getEventNumber() - 1, r.getExpectedOffset() + data.readableBytes());
});
answerRequest(connectionFactory, connection, location, MergeSegments.class, r -> {
    return new SegmentsMerged(r.getRequestId(), r.getSource(), r.getTarget(), -1);
});
LargeEventWriter writer = new LargeEventWriter(writerId, controller, connectionFactory);
EmptyTokenProviderImpl tokenProvider = new EmptyTokenProviderImpl();
ArrayList<ByteBuffer> buffers = new ArrayList<>();
buffers.add(ByteBuffer.allocate(Serializer.MAX_EVENT_SIZE * 2 + 1));
buffers.add(ByteBuffer.allocate(Serializer.MAX_EVENT_SIZE));
buffers.add(ByteBuffer.allocate(5));
writer.writeLargeEvent(segment, buffers, tokenProvider, EventWriterConfig.builder().enableLargeEvents(true).build());
assertEquals(4, written.size());
assertEquals(Serializer.MAX_EVENT_SIZE, written.get(0).readableBytes());
assertEquals(Serializer.MAX_EVENT_SIZE, written.get(1).readableBytes());
assertEquals(Serializer.MAX_EVENT_SIZE, written.get(2).readableBytes());
assertEquals(6 + WireCommands.TYPE_PLUS_LENGTH_SIZE * 3, written.get(3).readableBytes());
}
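The LargeEventWriterTest methods on this page also rely on a writerId field and an answerRequest helper that are defined elsewhere in the test class and are not shown here. The sketch below is only an approximation inferred from the call sites, not the repository's exact code; it assumes java.util.function.Function and the WireCommand and Reply types from io.pravega.shared.protocol.netty.

private final UUID writerId = UUID.randomUUID();

// Stub connection.send(request) so that the ReplyProcessor registered with the
// MockConnectionFactoryImpl receives the reply produced by the supplied function.
private <T extends WireCommand> void answerRequest(MockConnectionFactoryImpl connectionFactory, ClientConnection connection, PravegaNodeUri location, Class<T> request, Function<T, ? extends Reply> replyGenerator) throws ConnectionFailedException {
    Mockito.doAnswer(invocation -> {
        T argument = invocation.getArgument(0);
        connectionFactory.getProcessor(location).process(replyGenerator.apply(argument));
        return null;
    }).when(connection).send(any(request));
}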
Use of io.pravega.shared.protocol.netty.PravegaNodeUri in project pravega by pravega.
The class LargeEventWriterTest, method testRetriedErrors: verifies that CreateTransientSegment is retried after recoverable failures (expired auth token, wrong host, dropped connection) and succeeds on the second attempt.
@Test(timeout = 5000)
public void testRetriedErrors() throws ConnectionFailedException, NoSuchSegmentException, AuthenticationException, SegmentSealedException {
Segment segment = Segment.fromScopedName("foo/bar/1");
MockConnectionFactoryImpl connectionFactory = new MockConnectionFactoryImpl();
MockController controller = new MockController("localhost", 0, connectionFactory, false);
ClientConnection connection = Mockito.mock(ClientConnection.class);
PravegaNodeUri location = new PravegaNodeUri("localhost", 0);
connectionFactory.provideConnection(location, connection);
EmptyTokenProviderImpl tokenProvider = new EmptyTokenProviderImpl();
ArrayList<ByteBuffer> events = new ArrayList<>();
events.add(ByteBuffer.allocate(1));
AtomicBoolean failed = new AtomicBoolean(false);
AtomicBoolean succeeded = new AtomicBoolean(false);
answerRequest(connectionFactory, connection, location, SetupAppend.class, r -> new AppendSetup(r.getRequestId(), segment.getScopedName(), r.getWriterId(), WireCommands.NULL_ATTRIBUTE_VALUE));
answerRequest(connectionFactory, connection, location, ConditionalBlockEnd.class, r -> {
    ByteBuf data = r.getData();
    return new DataAppended(r.getRequestId(), r.getWriterId(), r.getEventNumber(), r.getEventNumber() - 1, r.getExpectedOffset() + data.readableBytes());
});
answerRequest(connectionFactory, connection, location, MergeSegments.class, r -> {
    return new SegmentsMerged(r.getRequestId(), r.getSource(), r.getTarget(), -1);
});
// First scenario: fail CreateTransientSegment with an expired auth token, then succeed on the retry.
Mockito.doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
        CreateTransientSegment argument = (CreateTransientSegment) invocation.getArgument(0);
        failed.set(true);
        connectionFactory.getProcessor(location).process(new AuthTokenCheckFailed(argument.getRequestId(), "stacktrace", ErrorCode.TOKEN_EXPIRED));
        return null;
    }
}).doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
        CreateTransientSegment argument = (CreateTransientSegment) invocation.getArgument(0);
        succeeded.set(true);
        connectionFactory.getProcessor(location).process(new SegmentCreated(argument.getRequestId(), "transient-segment"));
        return null;
    }
}).when(connection).send(any(CreateTransientSegment.class));
LargeEventWriter writer = new LargeEventWriter(writerId, controller, connectionFactory);
writer.writeLargeEvent(segment, events, tokenProvider, EventWriterConfig.builder().build());
assertTrue(failed.getAndSet(false));
assertTrue(succeeded.getAndSet(false));
// Second scenario: fail with WrongHost, then succeed on the retry.
Mockito.doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
        CreateTransientSegment argument = (CreateTransientSegment) invocation.getArgument(0);
        failed.set(true);
        connectionFactory.getProcessor(location).process(new WrongHost(argument.getRequestId(), "foo/bar/1", null, "stacktrace"));
        return null;
    }
}).doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
        CreateTransientSegment argument = (CreateTransientSegment) invocation.getArgument(0);
        succeeded.set(true);
        connectionFactory.getProcessor(location).process(new SegmentCreated(argument.getRequestId(), "transient-segment"));
        return null;
    }
}).when(connection).send(any(CreateTransientSegment.class));
writer = new LargeEventWriter(writerId, controller, connectionFactory);
writer.writeLargeEvent(segment, events, tokenProvider, EventWriterConfig.builder().build());
assertTrue(failed.getAndSet(false));
assertTrue(succeeded.getAndSet(false));
// Third scenario: fail the connection itself, then succeed on the retry.
Mockito.doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
        CreateTransientSegment argument = (CreateTransientSegment) invocation.getArgument(0);
        failed.set(true);
        connectionFactory.getProcessor(location).processingFailure(new ConnectionFailedException());
        return null;
    }
}).doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
        CreateTransientSegment argument = (CreateTransientSegment) invocation.getArgument(0);
        succeeded.set(true);
        connectionFactory.getProcessor(location).process(new SegmentCreated(argument.getRequestId(), "transient-segment"));
        return null;
    }
}).when(connection).send(any(CreateTransientSegment.class));
writer = new LargeEventWriter(writerId, controller, connectionFactory);
writer.writeLargeEvent(segment, events, tokenProvider, EventWriterConfig.builder().build());
assertTrue(failed.getAndSet(false));
assertTrue(succeeded.getAndSet(false));
}
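The three scenarios above all use the default EventWriterConfig. An application that wants large events and a bounded number of retries would configure the writer along these lines; retryAttempts is assumed to be the relevant builder option, so check EventWriterConfig for the exact knobs in your client version.

EventWriterConfig config = EventWriterConfig.builder()
        .enableLargeEvents(true) // allow events larger than Serializer.MAX_EVENT_SIZE
        .retryAttempts(3)        // assumption: caps retries of recoverable failures like those above
        .build();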
Use of io.pravega.shared.protocol.netty.PravegaNodeUri in project pravega by pravega.
The class LargeEventWriterTest, method testSegmentSealed: verifies that when the target segment is sealed, the writer asks the controller for the successor segments and retries the large-event write against a successor.
@Test(timeout = 5000)
public void testSegmentSealed() throws ConnectionFailedException, SegmentSealedException {
String scope = "scope";
String streamName = "stream";
StreamImpl stream = new StreamImpl(scope, streamName);
Segment segment = new Segment(scope, streamName, 0);
@Cleanup InlineExecutor executor = new InlineExecutor();
EventWriterConfig config = EventWriterConfig.builder().enableLargeEvents(true).build();
SegmentOutputStreamFactory streamFactory = Mockito.mock(SegmentOutputStreamFactory.class);
MockConnectionFactoryImpl connectionFactory = new MockConnectionFactoryImpl();
MockController controller = spy(new MockController("localhost", 0, connectionFactory, false));
controller.createScope(scope).join();
controller.createStream(scope, streamName, StreamConfiguration.builder().build());
PravegaNodeUri location = new PravegaNodeUri("localhost", 0);
ClientConnection connection = Mockito.mock(ClientConnection.class);
connectionFactory.provideConnection(location, connection);
SegmentOutputStream outputStream = Mockito.mock(SegmentOutputStream.class);
Mockito.when(streamFactory.createOutputStreamForSegment(any(), any(), any(), any())).thenReturn(outputStream);
AtomicBoolean failed = new AtomicBoolean(false);
AtomicBoolean succeeded = new AtomicBoolean(false);
// The first CreateTransientSegment attempt reports the parent segment as sealed; the retried attempt succeeds.
Mockito.doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
        CreateTransientSegment argument = (CreateTransientSegment) invocation.getArgument(0);
        failed.set(true);
        connectionFactory.getProcessor(location).process(new SegmentIsSealed(argument.getRequestId(), segment.getScopedName(), "stacktrace", 0));
        return null;
    }
}).doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
        CreateTransientSegment argument = (CreateTransientSegment) invocation.getArgument(0);
        succeeded.set(true);
        connectionFactory.getProcessor(location).process(new SegmentCreated(argument.getRequestId(), "transient-segment"));
        return null;
    }
}).when(connection).send(any(CreateTransientSegment.class));
Mockito.doAnswer(new Answer() {
    @Override
    public Object answer(InvocationOnMock invocation) throws Throwable {
        List<Long> predecessors = Arrays.asList(0L);
        return CompletableFuture.completedFuture(new StreamSegmentsWithPredecessors(ImmutableMap.of(
                new SegmentWithRange(new Segment(scope, streamName, NameUtils.computeSegmentId(1, 1)), 0.0, 0.5), predecessors,
                new SegmentWithRange(new Segment(scope, streamName, NameUtils.computeSegmentId(2, 1)), 0.5, 1.0), predecessors), ""));
    }
}).when(controller).getSuccessors(segment);
answerRequest(connectionFactory, connection, location, SetupAppend.class, r -> new AppendSetup(r.getRequestId(), segment.getScopedName(), r.getWriterId(), WireCommands.NULL_ATTRIBUTE_VALUE));
answerRequest(connectionFactory, connection, location, ConditionalBlockEnd.class, r -> {
    ByteBuf data = r.getData();
    return new DataAppended(r.getRequestId(), r.getWriterId(), r.getEventNumber(), r.getEventNumber() - 1, r.getExpectedOffset() + data.readableBytes());
});
answerRequest(connectionFactory, connection, location, MergeSegments.class, r -> {
    return new SegmentsMerged(r.getRequestId(), r.getSource(), r.getTarget(), -1);
});
@Cleanup EventStreamWriter<byte[]> writer = new EventStreamWriterImpl<>(stream, "id", controller, streamFactory, new ByteArraySerializer(), config, executor, executor, connectionFactory);
writer.writeEvent(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8 });
writer.writeEvent(new byte[Serializer.MAX_EVENT_SIZE * 2]);
assertTrue(failed.get());
assertTrue(succeeded.get());
InOrder order = Mockito.inOrder(connection, outputStream);
order.verify(outputStream).write(any(PendingEvent.class));
order.verify(outputStream).flush();
order.verify(connection, times(2)).send(any(CreateTransientSegment.class));
order.verify(connection).send(any(SetupAppend.class));
order.verify(connection, times(3)).send(any(ConditionalBlockEnd.class));
order.verify(connection).send(any(MergeSegments.class));
order.verify(connection).close();
order.verifyNoMoreInteractions();
}
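The successor segments stubbed into getSuccessors are named with NameUtils.computeSegmentId(segmentNumber, epoch), which packs the creation epoch and the segment number into the single long id that Segment expects. A rough illustration (the exact bit layout is an implementation detail of NameUtils):

// Segment number 1 created in epoch 1; roughly (1L << 32) | 1 under the current packing.
long successorId = NameUtils.computeSegmentId(1, 1);
Segment successor = new Segment("scope", "stream", successorId);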
Use of io.pravega.shared.protocol.netty.PravegaNodeUri in project pravega by pravega.
The class LargeEventWriterTest, method testEventStreamWriter: verifies that EventStreamWriterImpl sends events larger than Serializer.MAX_EVENT_SIZE through the large-event path (CreateTransientSegment, SetupAppend, ConditionalBlockEnd, MergeSegments), while smaller events go through the regular SegmentOutputStream.
@Test(timeout = 5000)
public void testEventStreamWriter() throws ConnectionFailedException, SegmentSealedException {
String scope = "scope";
String streamName = "stream";
StreamImpl stream = new StreamImpl(scope, streamName);
Segment segment = new Segment(scope, streamName, 0);
@Cleanup InlineExecutor executor = new InlineExecutor();
EventWriterConfig config = EventWriterConfig.builder().enableLargeEvents(true).build();
SegmentOutputStreamFactory streamFactory = Mockito.mock(SegmentOutputStreamFactory.class);
MockConnectionFactoryImpl connectionFactory = new MockConnectionFactoryImpl();
MockController controller = new MockController("localhost", 0, connectionFactory, false);
controller.createScope(scope).join();
controller.createStream(scope, streamName, StreamConfiguration.builder().build());
PravegaNodeUri location = new PravegaNodeUri("localhost", 0);
ClientConnection connection = Mockito.mock(ClientConnection.class);
connectionFactory.provideConnection(location, connection);
SegmentOutputStream outputStream = Mockito.mock(SegmentOutputStream.class);
Mockito.when(streamFactory.createOutputStreamForSegment(eq(segment), any(), any(), any())).thenReturn(outputStream);
answerRequest(connectionFactory, connection, location, CreateTransientSegment.class, r -> new SegmentCreated(r.getRequestId(), "transient-segment"));
answerRequest(connectionFactory, connection, location, SetupAppend.class, r -> new AppendSetup(r.getRequestId(), segment.getScopedName(), r.getWriterId(), WireCommands.NULL_ATTRIBUTE_VALUE));
answerRequest(connectionFactory, connection, location, ConditionalBlockEnd.class, r -> {
    ByteBuf data = r.getData();
    return new DataAppended(r.getRequestId(), r.getWriterId(), r.getEventNumber(), r.getEventNumber() - 1, r.getExpectedOffset() + data.readableBytes());
});
answerRequest(connectionFactory, connection, location, MergeSegments.class, r -> {
    return new SegmentsMerged(r.getRequestId(), r.getSource(), r.getTarget(), -1);
});
@Cleanup EventStreamWriter<byte[]> writer = new EventStreamWriterImpl<>(stream, "id", controller, streamFactory, new ByteArraySerializer(), config, executor, executor, connectionFactory);
writer.writeEvent(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8 });
writer.writeEvent(new byte[Serializer.MAX_EVENT_SIZE * 2]);
InOrder order = Mockito.inOrder(connection, outputStream);
order.verify(outputStream).write(any(PendingEvent.class));
order.verify(outputStream).flush();
order.verify(connection).send(any(CreateTransientSegment.class));
order.verify(connection).send(any(SetupAppend.class));
order.verify(connection, times(3)).send(any(ConditionalBlockEnd.class));
order.verify(connection).send(any(MergeSegments.class));
order.verify(connection).close();
order.verifyNoMoreInteractions();
}
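Outside of the mocked wire protocol, the same large-event path is enabled on a real client roughly as follows; the controller URI is a placeholder and large events must be opted into explicitly.

ClientConfig clientConfig = ClientConfig.builder()
        .controllerURI(URI.create("tcp://localhost:9090")) // placeholder endpoint
        .build();
@Cleanup
EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope("scope", clientConfig);
@Cleanup
EventStreamWriter<byte[]> eventWriter = clientFactory.createEventWriter("stream", new ByteArraySerializer(), EventWriterConfig.builder().enableLargeEvents(true).build());
// Payloads above Serializer.MAX_EVENT_SIZE are routed through LargeEventWriter transparently.
eventWriter.writeEvent(new byte[Serializer.MAX_EVENT_SIZE * 2]).join();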
Use of io.pravega.shared.protocol.netty.PravegaNodeUri in project pravega by pravega.
The class ReaderGroupStateManagerTest, method testSegmentsAssigned: verifies that two readers split four segments evenly with no overlap, and that segments released by a shut-down reader are reacquired by the remaining reader.
@Test(timeout = 10000)
public void testSegmentsAssigned() throws ReaderNotInReaderGroupException {
String scope = "scope";
String stream = "stream";
PravegaNodeUri endpoint = new PravegaNodeUri("localhost", SERVICE_PORT);
MockConnectionFactoryImpl connectionFactory = new MockConnectionFactoryImpl();
MockController controller = new MockController(endpoint.getEndpoint(), endpoint.getPort(), connectionFactory, false);
createScopeAndStream(scope, stream, controller);
MockSegmentStreamFactory streamFactory = new MockSegmentStreamFactory();
@Cleanup SynchronizerClientFactory clientFactory = new ClientFactoryImpl(scope, controller, connectionFactory, streamFactory, streamFactory, streamFactory, streamFactory);
SynchronizerConfig config = SynchronizerConfig.builder().build();
@Cleanup StateSynchronizer<ReaderGroupState> stateSynchronizer = createState(stream, clientFactory, config);
AtomicLong clock = new AtomicLong();
Map<SegmentWithRange, Long> segments = new HashMap<>();
segments.put(new SegmentWithRange(new Segment(scope, stream, 0), 0.0, 0.25), 0L);
segments.put(new SegmentWithRange(new Segment(scope, stream, 1), 0.25, 0.5), 1L);
segments.put(new SegmentWithRange(new Segment(scope, stream, 2), 0.5, 0.75), 2L);
segments.put(new SegmentWithRange(new Segment(scope, stream, 3), 0.75, 1.0), 3L);
stateSynchronizer.initialize(new ReaderGroupState.ReaderGroupStateInit(ReaderGroupConfig.builder().stream(Stream.of(scope, stream)).build(), segments, Collections.emptyMap(), false));
ReaderGroupStateManager reader1 = new ReaderGroupStateManager(scope, stream, "reader1", stateSynchronizer, controller, clock::get);
reader1.initializeReader(0);
ReaderGroupStateManager reader2 = new ReaderGroupStateManager(scope, stream, "reader2", stateSynchronizer, controller, clock::get);
reader2.initializeReader(0);
Map<SegmentWithRange, Long> segments1 = reader1.acquireNewSegmentsIfNeeded(0, new PositionImpl(Collections.emptyMap()));
assertFalse(segments1.isEmpty());
assertEquals(2, segments1.size());
assertTrue(reader1.acquireNewSegmentsIfNeeded(0, new PositionImpl(segments1)).isEmpty());
assertNull(reader1.findSegmentToReleaseIfRequired());
Map<SegmentWithRange, Long> segments2 = reader2.acquireNewSegmentsIfNeeded(0, new PositionImpl(Collections.emptyMap()));
assertFalse(segments2.isEmpty());
assertEquals(2, segments2.size());
assertTrue(reader2.acquireNewSegmentsIfNeeded(0, new PositionImpl(segments2)).isEmpty());
assertNull(reader2.findSegmentToReleaseIfRequired());
assertTrue(Sets.intersection(segments1.keySet(), segments2.keySet()).isEmpty());
clock.addAndGet(ReaderGroupStateManager.UPDATE_WINDOW.toNanos());
assertTrue(reader1.acquireNewSegmentsIfNeeded(0, new PositionImpl(segments1)).isEmpty());
assertNull(reader1.findSegmentToReleaseIfRequired());
assertTrue(reader2.acquireNewSegmentsIfNeeded(0, new PositionImpl(segments2)).isEmpty());
assertNull(reader2.findSegmentToReleaseIfRequired());
reader1.readerShutdown(new PositionImpl(segments1));
clock.addAndGet(ReaderGroupStateManager.UPDATE_WINDOW.toNanos());
Map<SegmentWithRange, Long> segmentsRecovered = reader2.acquireNewSegmentsIfNeeded(0, new PositionImpl(segments2));
assertFalse(segmentsRecovered.isEmpty());
assertEquals(2, segmentsRecovered.size());
assertEquals(segments1, segmentsRecovered);
assertTrue(reader2.acquireNewSegmentsIfNeeded(0, new PositionImpl(segments)).isEmpty());
assertNull(reader2.findSegmentToReleaseIfRequired());
segments2.putAll(segmentsRecovered);
reader2.readerShutdown(new PositionImpl(segments2));
reader1.initializeReader(0);
segments1 = reader1.acquireNewSegmentsIfNeeded(0, new PositionImpl(Collections.emptyMap()));
assertEquals(4, segments1.size());
assertEquals(segments2, segments1);
}
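testSegmentsAssigned uses SERVICE_PORT, createScopeAndStream and createState, which are defined elsewhere in ReaderGroupStateManagerTest. A minimal sketch inferred from the call sites; the port value and the serializers passed to createStateSynchronizer are assumptions, not the repository's exact code.

private static final int SERVICE_PORT = 12345; // any port works for the mock connection factory

private void createScopeAndStream(String scope, String stream, MockController controller) {
    controller.createScope(scope).join();
    controller.createStream(scope, stream, StreamConfiguration.builder().build()).join();
}

private StateSynchronizer<ReaderGroupState> createState(String stream, SynchronizerClientFactory clientFactory, SynchronizerConfig config) {
    return clientFactory.createStateSynchronizer(stream, new ReaderGroupState.ReaderGroupUpdateSerializer(), new ReaderGroupState.ReaderGroupInitSerializer(), config);
}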