Example use of io.pravega.client.connection.impl.SocketConnectionFactoryImpl in the pravega/pravega project, taken from the setup method of the ControllerServiceWithKVTableTest class.
/**
 * Builds an in-memory Controller stack for the key-value-table service tests:
 * spied metadata stores, a table-aware SegmentHelper mock, stream/transaction/table
 * metadata tasks, and a request handler that processes controller events inline
 * via a mock event writer.
 */
@Before
public void setup() {
    segmentHelperMock = SegmentHelperMock.getSegmentHelperMockForTables(executor);
    streamStore = spy(getStore());
    kvtStore = spy(getKVTStore());

    // ZK-backed stores shared by the metadata tasks created below.
    BucketStore zkBucketStore = StreamStoreFactory.createZKBucketStore(PRAVEGA_ZK_CURATOR_RESOURCE.client, executor);
    TaskMetadataStore taskStore = TaskStoreFactory.createZKStore(PRAVEGA_ZK_CURATOR_RESOURCE.client, executor);

    connectionFactory = new SocketConnectionFactoryImpl(
            ClientConfig.builder().controllerURI(URI.create("tcp://localhost")).build());
    GrpcAuthHelper authHelper = GrpcAuthHelper.getDisabledAuthHelper();

    StreamMetrics.initialize();
    TransactionMetrics.initialize();

    EventHelper eventHelper = EventHelperMock.getEventHelperMock(
            executor, "host", ((AbstractStreamMetadataStore) streamStore).getHostTaskIndex());
    streamMetadataTasks = new StreamMetadataTasks(
            streamStore, zkBucketStore, taskStore, segmentHelperMock, executor, "host", authHelper, eventHelper);
    streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(
            streamStore, segmentHelperMock, executor, "host", authHelper);
    kvtMetadataTasks = spy(new TableMetadataTasks(
            kvtStore, segmentHelperMock, executor, executor, "host", GrpcAuthHelper.getDisabledAuthHelper(), eventHelper));

    // Handler for every stream-related controller event type; DeleteScopeTask also
    // needs the KV-table store/tasks so scope deletion can clean up tables.
    StreamRequestHandler requestHandler = new StreamRequestHandler(
            new AutoScaleTask(streamMetadataTasks, streamStore, executor),
            new ScaleOperationTask(streamMetadataTasks, streamStore, executor),
            new UpdateStreamTask(streamMetadataTasks, streamStore, zkBucketStore, executor),
            new SealStreamTask(streamMetadataTasks, streamTransactionMetadataTasks, streamStore, executor),
            new DeleteStreamTask(streamMetadataTasks, streamStore, zkBucketStore, executor),
            new TruncateStreamTask(streamMetadataTasks, streamStore, executor),
            new CreateReaderGroupTask(streamMetadataTasks, streamStore, executor),
            new DeleteReaderGroupTask(streamMetadataTasks, streamStore, executor),
            new UpdateReaderGroupTask(streamMetadataTasks, streamStore, executor),
            streamStore,
            new DeleteScopeTask(streamMetadataTasks, streamStore, kvtStore, kvtMetadataTasks, executor),
            executor);
    streamMetadataTasks.setRequestEventWriter(new ControllerEventStreamWriterMock(requestHandler, executor));

    consumer = new ControllerService(kvtStore, kvtMetadataTasks, streamStore, zkBucketStore, streamMetadataTasks,
            streamTransactionMetadataTasks, segmentHelperMock, executor, null, requestTracker);
}
Example use of io.pravega.client.connection.impl.SocketConnectionFactoryImpl in the pravega/pravega project, taken from the initializer method of the StreamMetaDataAuthFocusedTests class.
// region Test class initializer and cleanup
/**
 * One-time class setup: writes a password/ACL file covering the full matrix of
 * test users (from all-powerful to no-authorization), builds an auth-enabled
 * handler manager from it, starts a REST server backed by a mocked
 * ControllerService, and creates the JAX-RS client used by the tests.
 */
@BeforeClass
public static void initializer() throws IOException, InvalidKeySpecException, NoSuchAlgorithmException {
    passwordHandlerInputFile = File.createTempFile("AuthFocusedTests", ".txt");
    StrongPasswordProcessor encryptor = StrongPasswordProcessor.builder().build();
    // NOTE(review): FileWriter uses the platform default charset (pre-Java 18);
    // fine for these ASCII entries, but worth confirming if credentials ever
    // contain non-ASCII characters.
    try (FileWriter writer = new FileWriter(passwordHandlerInputFile.getAbsolutePath())) {
        String pwd = encryptor.encryptPassword(DEFAULT_PASSWORD);
        // This user can do anything in the system.
        writer.write(credentialsAndAclAsString(USER_PRIVILEGED, pwd, "prn::*,READ_UPDATE"));
        writer.write(credentialsAndAclAsString(USER_SCOPE_CREATOR, pwd, "prn::/,READ_UPDATE"));
        // This user can list scopes and upon listing will see all scopes (/*).
        writer.write(credentialsAndAclAsString(USER_SCOPE_LISTER, pwd, "prn::/,READ;prn::/*,READ"));
        // This user can list, read, update, delete all scopes. Upon listing scopes, this user will see all scopes.
        writer.write(credentialsAndAclAsString(USER_SCOPE_MANAGER, pwd, "prn::/,READ_UPDATE;prn::/*,READ_UPDATE"));
        // This user can create, update, delete all child objects of a scope (streams, reader groups, etc.)
        writer.write(credentialsAndAclAsString(USER_STREAMS_IN_A_SCOPE_CREATOR, pwd, "prn::/scope:sisc-scope,READ_UPDATE;"));
        writer.write(credentialsAndAclAsString(USER_USER1, pwd, "prn::/,READ_UPDATE;prn::/scope:scope1,READ_UPDATE;prn::/scope:scope2,READ_UPDATE;"));
        writer.write(credentialsAndAclAsString(USER_WITH_NO_ROOT_ACCESS, pwd, "prn::/scope:scope1,READ_UPDATE;prn::/scope:scope2,READ_UPDATE;"));
        writer.write(credentialsAndAclAsString(USER_UNAUTHORIZED, pwd, "prn::/,READ_UPDATE;prn::/scope:scope1,READ_UPDATE;prn::/scope:scope2,READ_UPDATE;"));
        writer.write(credentialsAndAclAsString(USER_ACCESS_TO_SUBSET_OF_SCOPES, pwd, "prn::/,READ;prn::/scope:scope3,READ_UPDATE;"));
        writer.write(credentialsAndAclAsString(USER_WITH_NO_AUTHORIZATIONS, pwd, ";"));
        writer.write(credentialsAndAclAsString(USER_WITH_READ_UPDATE_ROOT, pwd, "prn::/scope:scopeToDelete,READ_UPDATE;"));
        writer.write(credentialsAndAclAsString(USER_ACCESS_TO_SCOPES_BUT_NOSTREAMS, pwd, "prn::/scope:myscope,READ_UPDATE;"));
        writer.write(credentialsAndAclAsString(USER_ACCESS_TO_SCOPES_READ_ALLSTREAMS, pwd, "prn::/scope:myscope,READ_UPDATE;prn::/scope:myscope/*,READ;"));
        writer.write(credentialsAndAclAsString(USER_ACCESS_TO_SCOPES_READUPDATE_ALLSTREAMS, pwd, "prn::/scope:myscope,READ_UPDATE;prn::/scope:myscope/*,READ_UPDATE;"));
        writer.write(credentialsAndAclAsString(USER_ACCESS_TO_SCOPE_WRITE_SPECIFIC_STREAM, pwd, "prn::/scope:myscope,READ_UPDATE;prn::/scope:myscope/stream:stream1,READ_UPDATE;"));
    }

    AuthHandlerManager authManager = new AuthHandlerManager(GRPCServerConfigImpl.builder()
            .authorizationEnabled(true)
            .userPasswordFile(passwordHandlerInputFile.getAbsolutePath())
            .port(1000)
            .build());
    ServerBuilder<?> grpcServerBuilder = ServerBuilder.forPort(TestUtils.getAvailableListenPort());
    GrpcAuthHelper.registerInterceptors(authManager.getHandlerMap(), grpcServerBuilder);

    mockControllerService = mock(ControllerService.class);
    serverConfig = RESTServerConfigImpl.builder().host("localhost").port(TestUtils.getAvailableListenPort()).build();
    LocalController controller = new LocalController(mockControllerService, false, "");
    connectionFactory = new SocketConnectionFactoryImpl(
            ClientConfig.builder().controllerURI(URI.create("tcp://localhost")).build());
    restServer = new RESTServer(serverConfig, Set.of(new StreamMetadataResourceImpl(
            controller, mockControllerService, authManager, connectionFactory, ClientConfig.builder().build())));
    restServer.startAsync();
    restServer.awaitRunning();
    client = ClientBuilder.newClient();
}
Example use of io.pravega.client.connection.impl.SocketConnectionFactoryImpl in the pravega/pravega project, taken from the setup method of the StreamMetaDataTests class.
/**
 * Per-test setup: starts a REST server over a mocked ControllerService, then
 * populates the shared request/response fixture objects (scaling policies,
 * retention policies with various day/hour/minute combinations, and the
 * create/update stream requests the individual tests reuse).
 */
@Before
public void setup() throws Exception {
    // --- REST server over a mocked controller ---
    mockControllerService = mock(ControllerService.class);
    serverConfig = RESTServerConfigImpl.builder().host("localhost").port(TestUtils.getAvailableListenPort()).build();
    LocalController controller = new LocalController(mockControllerService, false, "");
    connectionFactory = new SocketConnectionFactoryImpl(
            ClientConfig.builder().controllerURI(URI.create("tcp://localhost")).build());
    restServer = new RESTServer(serverConfig, Set.of(new StreamMetadataResourceImpl(
            controller, mockControllerService, authManager, connectionFactory, ClientConfig.builder().build())));
    restServer.startAsync();
    restServer.awaitRunning();
    client = ClientBuilder.newClient();

    // --- Scaling policies: rate-based and fixed-segment ---
    scalingPolicyCommon.setType(ScalingConfig.TypeEnum.BY_RATE_IN_EVENTS_PER_SEC);
    scalingPolicyCommon.setTargetRate(100);
    scalingPolicyCommon.setScaleFactor(2);
    scalingPolicyCommon.setMinSegments(2);
    scalingPolicyCommon2.setType(ScalingConfig.TypeEnum.FIXED_NUM_SEGMENTS);
    scalingPolicyCommon2.setMinSegments(2);

    // --- Retention: plain day-limited policy and an all-null variant ---
    retentionPolicyCommon.setType(TypeEnum.LIMITED_DAYS);
    retentionPolicyCommon.setValue(123L);
    TimeBasedRetention retCommon = new TimeBasedRetention();
    retentionPolicyCommon.setTimeBasedRetention(retCommon.days(123L).hours(0L).minutes(0L));
    retentionPolicyCommon2.setType(null);
    retentionPolicyCommon2.setValue(null);
    retentionPolicyCommon2.setTimeBasedRetention(null);

    streamResponseExpected.setScopeName(scope1);
    streamResponseExpected.setStreamName(stream1);
    streamResponseExpected.setScalingPolicy(scalingPolicyCommon);
    streamResponseExpected.setRetentionPolicy(retentionPolicyCommon);

    // --- Retention with mixed day/hour/minute granularity ---
    retentionPolicyGran.setType(TypeEnum.LIMITED_DAYS);
    retentionPolicyGran.setValue(0L);
    TimeBasedRetention retGran = new TimeBasedRetention();
    retentionPolicyGran.setTimeBasedRetention(retGran.days(2L).hours(3L).minutes(5L));
    streamResponseGranExpected.setScopeName(scope1);
    streamResponseGranExpected.setStreamName(stream1);
    streamResponseGranExpected.setScalingPolicy(scalingPolicyCommon);
    streamResponseGranExpected.setRetentionPolicy(retentionPolicyGran);

    // Days + minutes, no hours.
    retentionPolicyDateMins.setType(TypeEnum.LIMITED_DAYS);
    retentionPolicyDateMins.setValue(0L);
    TimeBasedRetention retDaysMins = new TimeBasedRetention();
    retentionPolicyDateMins.setTimeBasedRetention(retDaysMins.days(10L).hours(0L).minutes(50L));
    streamResponseRetDaysMinsExpected.setScopeName(scope1);
    streamResponseRetDaysMinsExpected.setStreamName(stream1);
    streamResponseRetDaysMinsExpected.setScalingPolicy(scalingPolicyCommon);
    streamResponseRetDaysMinsExpected.setRetentionPolicy(retentionPolicyDateMins);

    // Hours + minutes, no days.
    retentionPolicyHoursMins.setType(TypeEnum.LIMITED_DAYS);
    retentionPolicyHoursMins.setValue(0L);
    TimeBasedRetention retHoursMins = new TimeBasedRetention();
    retentionPolicyHoursMins.setTimeBasedRetention(retHoursMins.days(0L).hours(13L).minutes(26L));
    streamResponseRetHoursMinsExpected.setScopeName(scope1);
    streamResponseRetHoursMinsExpected.setStreamName(stream1);
    streamResponseRetHoursMinsExpected.setScalingPolicy(scalingPolicyCommon);
    streamResponseRetHoursMinsExpected.setRetentionPolicy(retentionPolicyHoursMins);

    // Hours only.
    retentionPolicyOnlyHours.setType(TypeEnum.LIMITED_DAYS);
    retentionPolicyOnlyHours.setValue(0L);
    TimeBasedRetention retOnlyHours = new TimeBasedRetention();
    retentionPolicyOnlyHours.setTimeBasedRetention(retOnlyHours.days(0L).hours(16L).minutes(0L));
    streamResponseRetOnlyHoursExpected.setScopeName(scope1);
    streamResponseRetOnlyHoursExpected.setStreamName(stream1);
    streamResponseRetOnlyHoursExpected.setScalingPolicy(scalingPolicyCommon);
    streamResponseRetOnlyHoursExpected.setRetentionPolicy(retentionPolicyOnlyHours);

    // Minutes only.
    retentionPolicyOnlyMins.setType(TypeEnum.LIMITED_DAYS);
    retentionPolicyOnlyMins.setValue(0L);
    TimeBasedRetention retOnlyMins = new TimeBasedRetention();
    retentionPolicyOnlyMins.setTimeBasedRetention(retOnlyMins.days(0L).hours(0L).minutes(32L));
    streamResponseRetOnlyMinsExpected.setScopeName(scope1);
    streamResponseRetOnlyMinsExpected.setStreamName(stream1);
    streamResponseRetOnlyMinsExpected.setScalingPolicy(scalingPolicyCommon);
    streamResponseRetOnlyMinsExpected.setRetentionPolicy(retentionPolicyOnlyMins);

    // --- Create-stream requests ---
    createStreamRequest.setStreamName(stream1);
    createStreamRequest.setScalingPolicy(scalingPolicyCommon);
    createStreamRequest.setRetentionPolicy(retentionPolicyCommon);
    createStreamRequest2.setStreamName(stream1);
    createStreamRequest2.setScalingPolicy(scalingPolicyCommon);
    createStreamRequest2.setRetentionPolicy(retentionPolicyCommon2);
    createStreamRequest3.setStreamName(stream1);
    createStreamRequest3.setScalingPolicy(scalingPolicyCommon);
    createStreamRequest3.setRetentionPolicy(retentionPolicyCommon);
    createStreamRequest4.setStreamName(stream3);
    createStreamRequest4.setScalingPolicy(scalingPolicyCommon);
    // stream 4 where targetRate and scalingFactor for Scaling Policy are null
    createStreamRequest5.setStreamName(stream4);
    createStreamRequest5.setScalingPolicy(scalingPolicyCommon2);
    createStreamRequest5.setRetentionPolicy(retentionPolicyCommon);

    streamResponseExpected2.setScopeName(scope1);
    streamResponseExpected2.setStreamName(stream3);
    streamResponseExpected2.setScalingPolicy(scalingPolicyCommon);
    streamResponseExpected3.setScopeName(scope1);
    streamResponseExpected3.setStreamName(stream4);
    streamResponseExpected3.setScalingPolicy(scalingPolicyCommon2);
    streamResponseExpected3.setRetentionPolicy(retentionPolicyCommon);

    // --- Update-stream requests ---
    updateStreamRequest.setScalingPolicy(scalingPolicyCommon);
    updateStreamRequest.setRetentionPolicy(retentionPolicyCommon);
    updateStreamRequest2.setScalingPolicy(scalingPolicyCommon);
    updateStreamRequest2.setRetentionPolicy(retentionPolicyCommon);
    updateStreamRequest3.setScalingPolicy(scalingPolicyCommon);
    updateStreamRequest3.setRetentionPolicy(retentionPolicyCommon2);
}
Example use of io.pravega.client.connection.impl.SocketConnectionFactoryImpl in the pravega/pravega project, taken from the setup method of the ControllerServiceWithStreamTest class.
/**
 * Builds an in-memory Controller stack backed by an embedded ZooKeeper test
 * server: spied metadata stores, a mocked SegmentHelper, the stream/transaction/
 * table metadata tasks, and a request handler processing controller events
 * inline through a mock event writer.
 *
 * Fix: the original caught the ZK-start exception and only logged it, then
 * dereferenced {@code zkServer.getConnectString()}, which turned a startup
 * failure into a misleading NullPointerException. We now fail fast with the
 * root cause preserved.
 */
@Before
public void setup() {
    try {
        zkServer = new TestingServerStarter().start();
    } catch (Exception e) {
        log.error("Error starting ZK server", e);
        // Fail fast instead of continuing and NPE-ing on zkServer below.
        throw new IllegalStateException("Error starting ZK server", e);
    }
    zkClient = CuratorFrameworkFactory.newClient(zkServer.getConnectString(), new ExponentialBackoffRetry(200, 10, 5000));
    zkClient.start();

    streamStore = spy(getStore());
    kvtStore = spy(getKVTStore());
    // ZK-backed stores shared by the metadata tasks below.
    BucketStore bucketStore = StreamStoreFactory.createZKBucketStore(zkClient, executor);
    TaskMetadataStore taskMetadataStore = TaskStoreFactory.createZKStore(zkClient, executor);

    connectionFactory = new SocketConnectionFactoryImpl(
            ClientConfig.builder().controllerURI(URI.create("tcp://localhost")).build());
    GrpcAuthHelper disabledAuthHelper = GrpcAuthHelper.getDisabledAuthHelper();
    SegmentHelper segmentHelperMock = SegmentHelperMock.getSegmentHelperMock();
    StreamMetrics.initialize();
    TransactionMetrics.initialize();

    EventHelper helperMock = EventHelperMock.getEventHelperMock(
            executor, "host", ((AbstractStreamMetadataStore) streamStore).getHostTaskIndex());
    streamMetadataTasks = new StreamMetadataTasks(
            streamStore, bucketStore, taskMetadataStore, segmentHelperMock, executor, "host", disabledAuthHelper, helperMock);
    kvtMetadataTasks = spy(new TableMetadataTasks(
            kvtStore, segmentHelperMock, executor, executor, "host", GrpcAuthHelper.getDisabledAuthHelper(), helperMock));
    streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(
            streamStore, segmentHelperMock, executor, "host", disabledAuthHelper);

    // Handler for every stream-related controller event type; DeleteScopeTask also
    // needs the KV-table store/tasks so scope deletion can clean up tables.
    StreamRequestHandler streamRequestHandler = new StreamRequestHandler(
            new AutoScaleTask(streamMetadataTasks, streamStore, executor),
            new ScaleOperationTask(streamMetadataTasks, streamStore, executor),
            new UpdateStreamTask(streamMetadataTasks, streamStore, bucketStore, executor),
            new SealStreamTask(streamMetadataTasks, streamTransactionMetadataTasks, streamStore, executor),
            new DeleteStreamTask(streamMetadataTasks, streamStore, bucketStore, executor),
            new TruncateStreamTask(streamMetadataTasks, streamStore, executor),
            new CreateReaderGroupTask(streamMetadataTasks, streamStore, executor),
            new DeleteReaderGroupTask(streamMetadataTasks, streamStore, executor),
            new UpdateReaderGroupTask(streamMetadataTasks, streamStore, executor),
            streamStore,
            new DeleteScopeTask(streamMetadataTasks, streamStore, kvtStore, kvtMetadataTasks, executor),
            executor);
    streamMetadataTasks.setRequestEventWriter(new ControllerEventStreamWriterMock(streamRequestHandler, executor));

    consumer = new ControllerService(kvtStore, kvtMetadataTasks, streamStore, bucketStore, streamMetadataTasks,
            streamTransactionMetadataTasks, segmentHelperMock, executor, null, requestTracker);
}
Example use of io.pravega.client.connection.impl.SocketConnectionFactoryImpl in the pravega/pravega project, taken from the setup method of the IntermittentCnxnFailureTest class.
/**
 * Per-test setup: enables in-memory metrics, wires ZK-backed stream/bucket/task
 * stores to a spied SegmentHelper whose segment-URI lookup is pinned to
 * localhost (so connection failures can be injected), builds the metadata
 * tasks and ControllerService, and pre-creates the test scope.
 */
@Before
public void setup() throws Exception {
    // Metrics with statsd reporting disabled; collected in-process only.
    MetricsConfig metricsCfg = MetricsConfig.builder()
            .with(MetricsConfig.ENABLE_STATISTICS, true)
            .with(MetricsConfig.ENABLE_STATSD_REPORTER, false)
            .build();
    metricsCfg.setDynamicCacheEvictionDuration(Duration.ofSeconds(60));
    MetricsProvider.initialize(metricsCfg);
    statsProvider = MetricsProvider.getMetricsProvider();
    statsProvider.startWithoutExporting();

    streamStore = spy(StreamStoreFactory.createZKStore(PRAVEGA_ZK_CURATOR_RESOURCE.client, executor));
    bucketStore = StreamStoreFactory.createZKBucketStore(PRAVEGA_ZK_CURATOR_RESOURCE.client, executor);
    TaskMetadataStore taskStore = TaskStoreFactory.createZKStore(PRAVEGA_ZK_CURATOR_RESOURCE.client, executor);
    HostControllerStore hostStore = HostStoreFactory.createInMemoryStore(HostMonitorConfigImpl.dummyConfig());

    connectionPool = new ConnectionPoolImpl(
            ClientConfig.builder().build(), new SocketConnectionFactoryImpl(ClientConfig.builder().build()));
    // Spy a real SegmentHelper but force every segment URI to localhost:SERVICE_PORT
    // so the tests control which connections succeed or fail.
    segmentHelperMock = spy(new SegmentHelper(connectionPool, hostStore, executor));
    doReturn(Controller.NodeUri.newBuilder().setEndpoint("localhost").setPort(Config.SERVICE_PORT).build())
            .when(segmentHelperMock).getSegmentUri(anyString(), anyString(), anyInt());

    streamMetadataTasks = new StreamMetadataTasks(
            streamStore, bucketStore, taskStore, segmentHelperMock, executor, "host", GrpcAuthHelper.getDisabledAuthHelper());
    streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(
            streamStore, segmentHelperMock, executor, "host", GrpcAuthHelper.getDisabledAuthHelper());
    controllerService = new ControllerService(kvtStore, kvtMetadataTasks, streamStore, bucketStore, streamMetadataTasks,
            streamTransactionMetadataTasks, segmentHelperMock, executor, null, requestTracker);
    StreamMetrics.initialize();

    // Pre-create the scope every test operates in; block until done.
    controllerService.createScope(SCOPE, 0L).get();
}
Aggregations