use of io.pravega.test.integration.demo.ClusterWrapper in project pravega by pravega.
the class ReadWithReadPermissionsTest method readsFromADifferentScopeTest.
@Test
public void readsFromADifferentScopeTest() {
String marketDataWriter = "writer";
String marketDataReader = "reader";
String password = "test-password";
String marketDataScope = "marketdata";
String computeScope = "compute";
String stream1 = "stream1";
final Map<String, String> passwordInputFileEntries = new HashMap<>();
passwordInputFileEntries.put(marketDataWriter, String.join(";",
        // Allows the user to create the "marketdata" scope, for this test
        "prn::/,READ_UPDATE",
        // Allows the user to create streams (and other scope children) under the "marketdata" scope
        "prn::/scope:marketdata,READ_UPDATE",
        // Provides the user all access to child objects of the "marketdata" scope
        "prn::/scope:marketdata/*,READ_UPDATE"));
passwordInputFileEntries.put(marketDataReader, String.join(";",
        // Allows the user to create the "compute" home scope
        "prn::/,READ_UPDATE",
        // Allows the user to create a reader-group under its home scope
        "prn::/scope:compute,READ_UPDATE",
        // Provides the user all access to child objects of the "compute" scope
        "prn::/scope:compute/*,READ_UPDATE",
        // Provides the user read access to the "marketdata/stream1" stream
        "prn::/scope:marketdata/stream:stream1,READ"));
// Setup and run the servers
@Cleanup final ClusterWrapper cluster = ClusterWrapper.builder()
        .authEnabled(true)
        .tokenSigningKeyBasis("secret")
        .tokenTtlInSeconds(600)
        .rgWritesWithReadPermEnabled(false)
        .passwordAuthHandlerEntries(TestUtils.preparePasswordInputFileEntries(passwordInputFileEntries, password))
        .build();
cluster.start();
// Prepare a client config for the `marketDataWriter`, whose home scope is "marketdata"
final ClientConfig writerClientConfig = ClientConfig.builder().controllerURI(URI.create(cluster.controllerUri())).credentials(new DefaultCredentials(password, marketDataWriter)).build();
// Create scope/stream `marketdata/stream1`
TestUtils.createScopeAndStreams(writerClientConfig, marketDataScope, Arrays.asList(stream1));
// Write a message to stream `marketdata/stream1`
TestUtils.writeDataToStream(marketDataScope, stream1, "test message", writerClientConfig);
// Prepare a client config for `marketDataReader`, whose home scope is "compute"
ClientConfig readerClientConfig = ClientConfig.builder().controllerURI(URI.create(cluster.controllerUri())).credentials(new DefaultCredentials(password, marketDataReader)).build();
// Create scope `compute` (without any streams)
TestUtils.createScopeAndStreams(readerClientConfig, computeScope, new ArrayList<>());
// Create a reader group config that enables a user to read data from `marketdata/stream1`
ReaderGroupConfig readerGroupConfig = ReaderGroupConfig.builder().stream(Stream.of(marketDataScope, stream1)).disableAutomaticCheckpoints().build();
// Create a reader-group for user `marketDataReader` in `compute` scope, which is its home scope.
@Cleanup ReaderGroupManager readerGroupManager = ReaderGroupManager.withScope(computeScope, readerClientConfig);
readerGroupManager.createReaderGroup("testRg", readerGroupConfig);
@Cleanup EventStreamClientFactory readerClientFactory = EventStreamClientFactory.withScope(computeScope, readerClientConfig);
@Cleanup EventStreamReader<String> reader = readerClientFactory.createReader("readerId", "testRg", new JavaSerializer<String>(), ReaderConfig.builder().initialAllocationDelay(0).build());
String readMessage = reader.readNextEvent(5000).getEvent();
assertEquals("test message", readMessage);
}
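The permission entries above follow the format "prn::<resource>,<permission>", with multiple entries for one user joined by ";". As a minimal sketch (not taken from the Pravega sources), an entries map granting a hypothetical "analyst" user read-only access to every stream under the "marketdata" scope could be assembled and passed to the cluster builder in the same way:
Map<String, String> aclEntries = new HashMap<>();
// Hypothetical user and ACL: read-only access to all child objects of the "marketdata" scope
aclEntries.put("analyst", "prn::/scope:marketdata/*,READ");
@Cleanup ClusterWrapper readOnlyCluster = ClusterWrapper.builder()
        .authEnabled(true)
        .passwordAuthHandlerEntries(TestUtils.preparePasswordInputFileEntries(aclEntries, "some-password"))
        .build();
readOnlyCluster.start();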
use of io.pravega.test.integration.demo.ClusterWrapper in project pravega by pravega.
the class ReadWithReadPermissionsTest method writeThenReadDataBack.
@SneakyThrows
private void writeThenReadDataBack(Map<String, String> passwordInputFileEntries, boolean writeToInternalStreamsWithReadPermission) {
final String scopeName = "MarketData";
final String streamName = "StockPriceUpdates";
final String readerGroupName = "PriceChangeCalculator";
final String message = "SCRIP:DELL,EXCHANGE:NYSE,PRICE=100";
final String pwd = "secret-password";
// Setup the cluster and create the objects
@Cleanup final ClusterWrapper cluster = ClusterWrapper.builder()
        .authEnabled(true)
        .tokenSigningKeyBasis("secret")
        .tokenTtlInSeconds(600)
        .rgWritesWithReadPermEnabled(writeToInternalStreamsWithReadPermission)
        .passwordAuthHandlerEntries(TestUtils.preparePasswordInputFileEntries(passwordInputFileEntries, pwd))
        .build();
cluster.start();
final ClientConfig writerClientConfig = ClientConfig.builder().controllerURI(URI.create(cluster.controllerUri())).credentials(new DefaultCredentials(pwd, "creator")).build();
TestUtils.createScopeAndStreams(writerClientConfig, scopeName, Arrays.asList(streamName));
TestUtils.writeDataToStream(scopeName, streamName, message, writerClientConfig);
// Now, read data back using the reader account.
ClientConfig readerClientConfig = ClientConfig.builder().controllerURI(URI.create(cluster.controllerUri())).credentials(new DefaultCredentials(pwd, "reader")).build();
String readMessage = TestUtils.readNextEventMessage(scopeName, streamName, readerClientConfig, readerGroupName);
log.info("Done reading event [{}]", readMessage);
assertEquals(message, readMessage);
}
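This helper expects the entries map to define a "creator" account (used to create the scope/stream and write the event) and a "reader" account (used to read it back). A hypothetical caller, with illustrative ACLs modeled on the other examples in this class, might look like the sketch below; the test name and the exact permission strings are assumptions, not the actual callers in ReadWithReadPermissionsTest:
@Test
public void readsWorkWithReadPermissionsOnly() {
    Map<String, String> entries = new HashMap<>();
    // "creator" gets full access everywhere, so it can create the scope/stream and write the event
    entries.put("creator", "prn::*,READ_UPDATE");
    // "reader" gets read-only access to the scope, the data stream and its reader-group
    entries.put("reader", String.join(";",
            "prn::/scope:MarketData,READ",
            "prn::/scope:MarketData/stream:StockPriceUpdates,READ",
            "prn::/scope:MarketData/reader-group:PriceChangeCalculator,READ"));
    // Enable reader-group internal-stream writes with READ permission, since "reader" has no READ_UPDATE ACLs
    writeThenReadDataBack(entries, true);
}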
use of io.pravega.test.integration.demo.ClusterWrapper in project pravega by pravega.
the class ClusterWrapperTest method writeAndReadBackAMessageWithTlsAndAuthOn.
@Test
public void writeAndReadBackAMessageWithTlsAndAuthOn() {
String scopeName = "testScope";
String streamName = "testStream";
String readerGroupName = "testReaderGroup";
String testMessage = "test message";
String password = "secret-password";
final Map<String, String> passwordInputFileEntries = new HashMap<>();
passwordInputFileEntries.put("writer", "prn::*,READ_UPDATE");
passwordInputFileEntries.put("reader", String.join(";", "prn::/scope:testScope,READ", "prn::/scope:testScope/stream:testStream,READ", "prn::/scope:testScope/reader-group:testReaderGroup,READ"));
// Instantiate and run the cluster
@Cleanup ClusterWrapper cluster = ClusterWrapper.builder()
        .authEnabled(true)
        .passwordAuthHandlerEntries(TestUtils.preparePasswordInputFileEntries(passwordInputFileEntries, password))
        .tlsEnabled(true)
        .tlsProtocolVersion(SecurityConfigDefaults.TLS_PROTOCOL_VERSION)
        .tlsServerCertificatePath(TestUtils.pathToConfig() + SecurityConfigDefaults.TLS_SERVER_CERT_FILE_NAME)
        .tlsServerKeyPath(TestUtils.pathToConfig() + SecurityConfigDefaults.TLS_SERVER_PRIVATE_KEY_FILE_NAME)
        .tlsHostVerificationEnabled(false)
        .build();
cluster.start();
// Write an event to the stream
final ClientConfig writerClientConfig = ClientConfig.builder()
        .controllerURI(URI.create(cluster.controllerUri()))
        .trustStore(TestUtils.pathToConfig() + SecurityConfigDefaults.TLS_SERVER_CERT_FILE_NAME)
        .validateHostName(false)
        .credentials(new DefaultCredentials(password, "writer"))
        .build();
TestUtils.createScopeAndStreams(writerClientConfig, scopeName, Arrays.asList(streamName));
TestUtils.writeDataToStream(scopeName, streamName, testMessage, writerClientConfig);
// Read back the event from the stream and verify it is the same as what was written
final ClientConfig readerClientConfig = ClientConfig.builder()
        .controllerURI(URI.create(cluster.controllerUri()))
        .trustStore(TestUtils.pathToConfig() + SecurityConfigDefaults.TLS_SERVER_CERT_FILE_NAME)
        .validateHostName(false)
        .credentials(new DefaultCredentials(password, "reader"))
        .build();
String readMessage = TestUtils.readNextEventMessage(scopeName, streamName, readerClientConfig, readerGroupName);
assertEquals(testMessage, readMessage);
}
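TestUtils.readNextEventMessage is a project test helper that is not shown in these snippets. A rough, assumed equivalent, built only from the client APIs already used in the examples above (the actual implementation may differ), is:
// Assumed sketch of readNextEventMessage(scopeName, streamName, readerClientConfig, readerGroupName)
ReaderGroupConfig readerGroupConfig = ReaderGroupConfig.builder()
        .stream(Stream.of(scopeName, streamName))
        .disableAutomaticCheckpoints()
        .build();
@Cleanup ReaderGroupManager readerGroupManager = ReaderGroupManager.withScope(scopeName, readerClientConfig);
readerGroupManager.createReaderGroup(readerGroupName, readerGroupConfig);
@Cleanup EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scopeName, readerClientConfig);
@Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", readerGroupName,
        new JavaSerializer<String>(), ReaderConfig.builder().build());
String readMessage = reader.readNextEvent(5000).getEvent();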
use of io.pravega.test.integration.demo.ClusterWrapper in project pravega by pravega.
the class ClusterWrapperTest method restApiInvocationWithSecurityEnabled.
@SneakyThrows
@Test
public void restApiInvocationWithSecurityEnabled() {
String restApiUser = "rest-api-user";
String restApiUserPwd = "super-secret";
final Map<String, String> passwordInputFileEntries = new HashMap<>();
passwordInputFileEntries.put("restApiUser", "prn::*,READ_UPDATE");
// Setup and start the cluster
@Cleanup ClusterWrapper cluster = ClusterWrapper.builder()
        .controllerRestEnabled(true)
        .authEnabled(true)
        .passwordAuthHandlerEntries(TestUtils.preparePasswordInputFileEntries(passwordInputFileEntries, restApiUserPwd))
        .tlsEnabled(true)
        .tlsProtocolVersion(SecurityConfigDefaults.TLS_PROTOCOL_VERSION)
        .tlsServerCertificatePath(TestUtils.pathToConfig() + SecurityConfigDefaults.TLS_SERVER_CERT_FILE_NAME)
        .tlsServerKeyPath(TestUtils.pathToConfig() + SecurityConfigDefaults.TLS_SERVER_PRIVATE_KEY_FILE_NAME)
        .tlsHostVerificationEnabled(false)
        .tlsServerKeystorePath(TestUtils.pathToConfig() + SecurityConfigDefaults.TLS_SERVER_KEYSTORE_NAME)
        .tlsServerKeystorePasswordPath(TestUtils.pathToConfig() + SecurityConfigDefaults.TLS_PASSWORD_FILE_NAME)
        .build();
cluster.start();
// Setup REST client config
org.glassfish.jersey.client.ClientConfig clientConfig = new org.glassfish.jersey.client.ClientConfig();
clientConfig.register(JacksonJsonProvider.class);
clientConfig.property("sun.net.http.allowRestrictedHeaders", "true");
clientConfig.register(HttpAuthenticationFeature.basic(restApiUser, restApiUserPwd));
// Prepare a TLS context with a truststore containing the signing CA's certificate
KeyStore trustStore = createTrustStore(TestUtils.pathToConfig() + SecurityConfigDefaults.TLS_CA_CERT_FILE_NAME);
TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
tmf.init(trustStore);
SSLContext tlsContext = SSLContext.getInstance("TLS");
tlsContext.init(null, tmf.getTrustManagers(), null);
// Instantiate the REST client
javax.ws.rs.client.Client client = ClientBuilder.newBuilder().withConfig(clientConfig).sslContext(tlsContext).build();
String restServerURI = cluster.controllerRestUri();
log.info("REST Server URI: {}", restServerURI);
// Invoke the REST operation
String resourceUrl = restServerURI + "/ping";
WebTarget webTarget = client.target(resourceUrl);
Invocation.Builder builder = webTarget.request();
// Check if the response was as expected
Response response = builder.get();
assertEquals("Response to /ping was not OK", OK.getStatusCode(), response.getStatus());
log.info("Ping successful.");
response.close();
client.close();
}
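The createTrustStore(..) helper referenced above is not shown here. One plausible implementation, using only standard JDK APIs (java.security.KeyStore, java.security.cert.CertificateFactory), is sketched below; the real helper in ClusterWrapperTest may differ in detail:
private KeyStore createTrustStore(String caCertPath) throws Exception {
    // Start with an empty, in-memory truststore
    KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
    trustStore.load(null, null);
    // Load the CA certificate from the given path and add it as a trusted entry
    try (InputStream caCertStream = new FileInputStream(caCertPath)) {
        Certificate caCert = CertificateFactory.getInstance("X.509").generateCertificate(caCertStream);
        trustStore.setCertificateEntry("ca", caCert);
    }
    return trustStore;
}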
use of io.pravega.test.integration.demo.ClusterWrapper in project pravega by pravega.
the class DelegationTokenTest method testDelegationTokenGetsRenewedAfterExpiry.
/**
* This test verifies that an event stream reader continues to read events as a result of automatic delegation token
* renewal, after the initial delegation token it uses expires.
*
* We use an extraordinarily high test timeout and read timeouts to account for any inordinate delays that may be
* encountered in testing environments.
*/
@Test(timeout = 50000)
public void testDelegationTokenGetsRenewedAfterExpiry() throws InterruptedException {
// Delegation token renewal threshold is 5 seconds, so we are using 6 seconds as the token TTL so that the
// token doesn't get renewed before each use.
ClusterWrapper pravegaCluster = ClusterWrapper.builder().authEnabled(true).tokenTtlInSeconds(6).build();
try {
pravegaCluster.start();
final String scope = "testscope";
final String streamName = "teststream";
final int numSegments = 1;
final ClientConfig clientConfig = ClientConfig.builder().controllerURI(URI.create(pravegaCluster.controllerUri())).credentials(new DefaultCredentials("1111_aaaa", "admin")).build();
log.debug("Done creating client config.");
createScopeStream(scope, streamName, numSegments, clientConfig);
@Cleanup final EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig);
// Perform writes on a separate thread.
Runnable runnable = () -> {
@Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName, new JavaSerializer<String>(), EventWriterConfig.builder().build());
for (int i = 0; i < 10; i++) {
String msg = "message: " + i;
writer.writeEvent(msg).join();
log.debug("Done writing message '{}' to stream '{} / {}'", msg, scope, streamName);
}
};
@Cleanup("interrupt") Thread writerThread = new Thread(runnable);
writerThread.start();
// Now, read the events from the stream.
String readerGroup = UUID.randomUUID().toString().replace("-", "");
ReaderGroupConfig readerGroupConfig = ReaderGroupConfig.builder().stream(Stream.of(scope, streamName)).disableAutomaticCheckpoints().build();
@Cleanup ReaderGroupManager readerGroupManager = ReaderGroupManager.withScope(scope, clientConfig);
readerGroupManager.createReaderGroup(readerGroup, readerGroupConfig);
@Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", readerGroup, new JavaSerializer<String>(), ReaderConfig.builder().build());
int j = 0;
EventRead<String> event = null;
do {
event = reader.readNextEvent(2000);
if (event.getEvent() != null) {
log.info("Done reading event: {}", event.getEvent());
j++;
}
// We are keeping sleep time relatively large, just to make sure that the delegation token expires
// midway.
Thread.sleep(500);
} while (event.getEvent() != null);
// Assert that we end up reading 10 events even though delegation token must have expired midway.
//
// To look for evidence of delegation token renewal check the logs for the following message:
// - "Token is nearing expiry, so refreshing it"
assertSame(10, j);
} finally {
pravegaCluster.close();
}
}
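The createScopeStream(..) helper used above is likewise not shown in this snippet. A rough, assumed equivalent based on the standard Pravega StreamManager API (the actual helper in DelegationTokenTest may differ) is:
private void createScopeStream(String scope, String streamName, int numSegments, ClientConfig clientConfig) {
    // Create the scope and a stream with a fixed number of segments
    @Cleanup StreamManager streamManager = StreamManager.create(clientConfig);
    streamManager.createScope(scope);
    streamManager.createStream(scope, streamName, StreamConfiguration.builder()
            .scalingPolicy(ScalingPolicy.fixed(numSegments))
            .build());
}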