Example usage of io.pravega.client.connection.impl.SocketConnectionFactoryImpl in the pravega/pravega project.
Class: RetentionTest — method: testRetentionSize.
@Test(timeout = 30000)
public void testRetentionSize() throws Exception {
    // Stream with two fixed segments and a size-based retention policy of 10 bytes.
    final String streamName = "testsize";
    final StreamConfiguration streamConfig = StreamConfiguration.builder()
            .scalingPolicy(ScalingPolicy.fixed(2))
            .retentionPolicy(RetentionPolicy.bySizeBytes(10))
            .build();
    LocalController localController = (LocalController) controllerWrapper.getController();
    Stream streamHandle = new StreamImpl(streamName, streamName);
    // Scope and stream deliberately share the same name.
    controllerWrapper.getControllerService().createScope(streamName, 0L).get();
    localController.createStream(streamName, streamName, streamConfig).get();
    @Cleanup
    ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(
            ClientConfig.builder().controllerURI(controllerURI).build());
    @Cleanup
    ClientFactoryImpl clientFactory = new ClientFactoryImpl(streamName, localController, connectionFactory);
    @Cleanup
    EventStreamWriter<String> writer = clientFactory.createEventWriter(
            streamName, new JavaSerializer<>(), EventWriterConfig.builder().build());
    // Before any writes, every segment's earliest available offset should be zero.
    Map<Segment, Long> startOffsets = localController.getSegmentsAtTime(streamHandle, 0L).join();
    assertTrue(startOffsets.values().stream().allMatch(offset -> offset == 0));
    // Keep writing events in the background so the retention policy has data to truncate.
    AtomicBoolean keepWriting = new AtomicBoolean(true);
    Futures.loop(keepWriting::get, () -> writer.writeEvent("a"), executor);
    // Truncation is observed once some segment's earliest offset moves past zero.
    AssertExtensions.assertEventuallyEquals(true,
            () -> localController.getSegmentsAtTime(streamHandle, 0L).join()
                    .values().stream().anyMatch(offset -> offset > 0),
            30 * 1000L);
    keepWriting.set(false);
}
Example usage of io.pravega.client.connection.impl.SocketConnectionFactoryImpl in the pravega/pravega project.
Class: RetentionTest — method: testRetentionTime.
@Test(timeout = 30000)
public void testRetentionTime() throws Exception {
    // Stream with two fixed segments and a time-based retention policy of one second.
    final String streamName = "testtime";
    final StreamConfiguration streamConfig = StreamConfiguration.builder()
            .scalingPolicy(ScalingPolicy.fixed(2))
            .retentionPolicy(RetentionPolicy.byTime(Duration.ofSeconds(1)))
            .build();
    LocalController localController = (LocalController) controllerWrapper.getController();
    Stream streamHandle = new StreamImpl(streamName, streamName);
    // Scope and stream deliberately share the same name.
    controllerWrapper.getControllerService().createScope(streamName, 0L).get();
    localController.createStream(streamName, streamName, streamConfig).get();
    @Cleanup
    ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(
            ClientConfig.builder().controllerURI(controllerURI).build());
    @Cleanup
    ClientFactoryImpl clientFactory = new ClientFactoryImpl(streamName, localController, connectionFactory);
    @Cleanup
    EventStreamWriter<String> writer = clientFactory.createEventWriter(
            streamName, new JavaSerializer<>(), EventWriterConfig.builder().build());
    // Before any writes, every segment's earliest available offset should be zero.
    Map<Segment, Long> startOffsets = localController.getSegmentsAtTime(streamHandle, 0L).join();
    assertTrue(startOffsets.values().stream().allMatch(offset -> offset == 0));
    // Keep writing events in the background so the retention policy has data to truncate.
    AtomicBoolean keepWriting = new AtomicBoolean(true);
    Futures.loop(keepWriting::get, () -> writer.writeEvent("a"), executor);
    // Truncation is observed once some segment's earliest offset moves past zero.
    AssertExtensions.assertEventuallyEquals(true,
            () -> localController.getSegmentsAtTime(streamHandle, 0L).join()
                    .values().stream().anyMatch(offset -> offset > 0),
            30 * 1000L);
    keepWriting.set(false);
}
Example usage of io.pravega.client.connection.impl.SocketConnectionFactoryImpl in the pravega/pravega project.
Class: RequestHandlersTest — method: setup.
@Before
public void setup() throws Exception {
    // Metric registries must be initialized before any stream/transaction operation records metrics.
    StreamMetrics.initialize();
    TransactionMetrics.initialize();
    // Start an in-process ZooKeeper test server and connect a Curator client with a short retry policy.
    zkServer = new TestingServerStarter().start();
    zkServer.start();
    zkClient = CuratorFrameworkFactory.newClient(zkServer.getConnectString(), new ExponentialBackoffRetry(20, 1, 50));
    zkClient.start();
    String hostId;
    try {
        // On each controller process restart, it gets a fresh hostId,
        // which is a combination of hostname and random GUID.
        hostId = InetAddress.getLocalHost().getHostAddress() + UUID.randomUUID().toString();
    } catch (UnknownHostException e) {
        // Fall back to a pure GUID when the local hostname cannot be resolved.
        hostId = UUID.randomUUID().toString();
    }
    // Stores and helpers backing the metadata tasks under test.
    streamStore = spy(getStore());
    bucketStore = StreamStoreFactory.createZKBucketStore(zkClient, executor);
    taskMetadataStore = TaskStoreFactory.createZKStore(zkClient, executor);
    connectionFactory = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
    segmentHelper = SegmentHelperMock.getSegmentHelperMock();
    clientFactory = mock(EventStreamClientFactory.class);
    // Stub the client factory to hand out mock writers before the tasks request them.
    doAnswer(x -> new EventStreamWriterMock<>()).when(clientFactory).createEventWriter(anyString(), any(), any());
    // Fix: construct StreamMetadataTasks exactly once. The original code built it twice
    // back-to-back, discarding (and never closing) the first instance.
    streamMetadataTasks = new StreamMetadataTasks(streamStore, bucketStore, taskMetadataStore, segmentHelper, executor, hostId, GrpcAuthHelper.getDisabledAuthHelper());
    streamMetadataTasks.initializeStreamWriters(clientFactory, Config.SCALE_STREAM_NAME);
    streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(streamStore, segmentHelper, executor, hostId, GrpcAuthHelper.getDisabledAuthHelper());
    streamTransactionMetadataTasks.initializeStreamWriters(new EventStreamWriterMock<>(), new EventStreamWriterMock<>());
    // Key-value table store/tasks (spy + mock) used by KVT request-handler tests.
    kvtStore = spy(getKvtStore());
    kvtTasks = mock(TableMetadataTasks.class);
    // Fix: removed the unused local 'createTimestamp' (it was never read in this method).
    // add a host in zk
    // mock pravega
    // create a stream
    streamStore.createScope(scope, null, executor).get();
}
Example usage of io.pravega.client.connection.impl.SocketConnectionFactoryImpl in the pravega/pravega project.
Class: ScaleRequestHandlerTest — method: setup.
@Before
public void setup() throws Exception {
    // Metric registries must be initialized before any stream/transaction operation records metrics.
    StreamMetrics.initialize();
    TransactionMetrics.initialize();
    // Start an in-process ZooKeeper test server and connect a Curator client with a short retry policy.
    zkServer = new TestingServerStarter().start();
    zkServer.start();
    zkClient = CuratorFrameworkFactory.newClient(zkServer.getConnectString(), new ExponentialBackoffRetry(20, 1, 50));
    zkClient.start();
    String hostId;
    try {
        // On each controller process restart, it gets a fresh hostId,
        // which is a combination of hostname and random GUID.
        hostId = InetAddress.getLocalHost().getHostAddress() + UUID.randomUUID().toString();
    } catch (UnknownHostException e) {
        // Fall back to a pure GUID when the local hostname cannot be resolved.
        hostId = UUID.randomUUID().toString();
    }
    // Stores and helpers backing the metadata tasks under test.
    streamStore = spy(getStore());
    bucketStore = StreamStoreFactory.createZKBucketStore(zkClient, executor);
    taskMetadataStore = TaskStoreFactory.createZKStore(zkClient, executor);
    connectionFactory = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
    clientFactory = mock(EventStreamClientFactory.class);
    SegmentHelper segmentHelper = SegmentHelperMock.getSegmentHelperMock();
    streamMetadataTasks = new StreamMetadataTasks(streamStore, bucketStore, taskMetadataStore, segmentHelper, executor, hostId, GrpcAuthHelper.getDisabledAuthHelper());
    streamMetadataTasks.initializeStreamWriters(clientFactory, Config.SCALE_STREAM_NAME);
    // NOTE(review): streamTransactionMetadataTasks is created but initializeStreamWriters is not
    // called here, unlike sibling setups — presumably these tests never commit/abort txns; confirm.
    streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(streamStore, segmentHelper, executor, hostId, GrpcAuthHelper.getDisabledAuthHelper());
    long createTimestamp = System.currentTimeMillis();
    // add a host in zk
    // mock pravega
    // create a stream
    streamStore.createScope(scope, null, executor).get();
    // Create the stream with an event-rate scaling policy (min 3 segments initially).
    StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.byEventRate(1, 2, 3)).build();
    streamMetadataTasks.createStream(scope, stream, config, createTimestamp, 0L).get();
    // set minimum number of segments to 1 so that we can also test scale downs
    config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.byEventRate(1, 2, 1)).build();
    streamStore.startUpdateConfiguration(scope, stream, config, null, executor).join();
    // Complete the configuration update with the versioned record just started above.
    VersionedMetadata<StreamConfigurationRecord> configRecord = streamStore.getConfigurationRecord(scope, stream, null, executor).join();
    streamStore.completeUpdateConfiguration(scope, stream, configRecord, null, executor).join();
}
Example usage of io.pravega.client.connection.impl.SocketConnectionFactoryImpl in the pravega/pravega project.
Class: EndToEndCBRTest — method: testReaderGroupAutoRetention.
@Test(timeout = 60000)
public void testReaderGroupAutoRetention() throws Exception {
    final String scope = "test";
    final String streamName = "testReaderGroupAutoRetention";
    final String groupName = "testReaderGroupAutoRetention-group";
    // Single-segment stream with consumption-based retention (10 bytes min, unbounded max).
    StreamConfiguration streamConfig = StreamConfiguration.builder()
            .scalingPolicy(ScalingPolicy.fixed(1))
            .retentionPolicy(RetentionPolicy.bySizeBytes(10, Long.MAX_VALUE))
            .build();
    LocalController localController = (LocalController) PRAVEGA.getLocalController();
    localController.createScope(scope).get();
    localController.createStream(scope, streamName, streamConfig).get();
    Stream targetStream = Stream.of(scope, streamName);
    @Cleanup
    ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(
            ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build());
    @Cleanup
    ClientFactoryImpl clientFactory = new ClientFactoryImpl(scope, localController, connectionFactory);
    // Write two events to the stream.
    @Cleanup
    EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName, serializer, EventWriterConfig.builder().build());
    writer.writeEvent("1", "e1").join();
    writer.writeEvent("2", "e2").join();
    // Reader group whose checkpoints drive automatic release of consumed data.
    @Cleanup
    ReaderGroupManager groupManager = new ReaderGroupManagerImpl(scope, localController, clientFactory);
    groupManager.createReaderGroup(groupName, ReaderGroupConfig.builder()
            .disableAutomaticCheckpoints()
            .retentionType(ReaderGroupConfig.StreamDataRetention.AUTOMATIC_RELEASE_AT_LAST_CHECKPOINT)
            .stream(targetStream)
            .build());
    // Reader with a manually advanced clock so read/checkpoint timing is deterministic.
    AtomicLong clock = new AtomicLong();
    @Cleanup
    EventStreamReader<String> reader = clientFactory.createReader("reader1", groupName, serializer,
            ReaderConfig.builder().build(), clock::get, clock::get);
    clock.addAndGet(CLOCK_ADVANCE_INTERVAL);
    EventRead<String> event = reader.readNextEvent(60000);
    assertEquals("e1", event.getEvent());
    clock.addAndGet(CLOCK_ADVANCE_INTERVAL);
    @Cleanup("shutdown")
    final InlineExecutor backgroundExecutor = new InlineExecutor();
    ReaderGroup readerGroup = groupManager.getReaderGroup(groupName);
    CompletableFuture<Checkpoint> checkpointFuture = readerGroup.initiateCheckpoint("Checkpoint", backgroundExecutor);
    assertFalse(checkpointFuture.isDone());
    // The next read delivers the checkpoint marker rather than a data event.
    event = reader.readNextEvent(60000);
    assertTrue(event.isCheckpoint());
    assertEquals("Checkpoint", event.getCheckpointName());
    assertNull(event.getEvent());
    clock.addAndGet(CLOCK_ADVANCE_INTERVAL);
    event = reader.readNextEvent(60000);
    assertEquals("e2", event.getEvent());
    // Reading past the checkpoint allows it to complete.
    Checkpoint completed = checkpointFuture.get(5, TimeUnit.SECONDS);
    assertTrue(checkpointFuture.isDone());
    assertEquals("Checkpoint", completed.getName());
    // The stream is drained: a short read yields neither an event nor a checkpoint.
    event = reader.readNextEvent(100);
    assertNull(event.getEvent());
    assertFalse(event.isCheckpoint());
    // Retention should eventually truncate the checkpointed prefix (some segment offset > 0).
    AssertExtensions.assertEventuallyEquals(true,
            () -> localController.getSegmentsAtTime(new StreamImpl(scope, streamName), 0L).join()
                    .values().stream().anyMatch(off -> off > 0),
            30 * 1000L);
    // A fresh reader group now starts after the truncated prefix and sees only e2.
    String secondGroup = groupName + "2";
    groupManager.createReaderGroup(secondGroup, ReaderGroupConfig.builder()
            .disableAutomaticCheckpoints()
            .stream(NameUtils.getScopedStreamName(scope, streamName))
            .build());
    EventStreamReader<String> secondReader = clientFactory.createReader("reader2", secondGroup, serializer, ReaderConfig.builder().build());
    EventRead<String> secondRead = secondReader.readNextEvent(10000);
    assertEquals("e2", secondRead.getEvent());
}
Aggregations