Use of io.pravega.client.stream.StreamConfiguration in project pravega by pravega.
Class StreamTransactionMetadataTasksTest, method idempotentOperationsTests.
@Test(timeout = 10000)
public void idempotentOperationsTests() throws CheckpointStoreException, InterruptedException {
// Create mock writer objects.
EventStreamWriterMock<CommitEvent> commitWriter = new EventStreamWriterMock<>();
EventStreamWriterMock<AbortEvent> abortWriter = new EventStreamWriterMock<>();
EventStreamReader<CommitEvent> commitReader = commitWriter.getReader();
EventStreamReader<AbortEvent> abortReader = abortWriter.getReader();
// Create transaction tasks.
txnTasks = new StreamTransactionMetadataTasks(streamStore, hostStore, segmentHelperMock, executor, "host", connectionFactory, false, "");
txnTasks.initializeStreamWriters("commitStream", commitWriter, "abortStream", abortWriter);
consumer = new ControllerService(streamStore, hostStore, streamMetadataTasks, txnTasks, segmentHelperMock, executor, null);
final ScalingPolicy policy1 = ScalingPolicy.fixed(2);
final StreamConfiguration configuration1 = StreamConfiguration.builder().scope(SCOPE).streamName(STREAM).scalingPolicy(policy1).build();
// Create stream and scope
Assert.assertEquals(Controller.CreateScopeStatus.Status.SUCCESS, consumer.createScope(SCOPE).join().getStatus());
Assert.assertEquals(Controller.CreateStreamStatus.Status.SUCCESS, streamMetadataTasks.createStream(SCOPE, STREAM, configuration1, System.currentTimeMillis()).join());
// Create 2 transactions
final long lease = 5000;
final long scaleGracePeriod = 10000;
VersionedTransactionData txData1 = txnTasks.createTxn(SCOPE, STREAM, lease, scaleGracePeriod, null).join().getKey();
VersionedTransactionData txData2 = txnTasks.createTxn(SCOPE, STREAM, lease, scaleGracePeriod, null).join().getKey();
UUID tx1 = txData1.getId();
UUID tx2 = txData2.getId();
int tx2Version = txData2.getVersion();
// Commit the first one
Assert.assertEquals(TxnStatus.COMMITTING, txnTasks.commitTxn(SCOPE, STREAM, tx1, null).join());
// Ensure that transaction state is COMMITTING.
assertEquals(TxnStatus.COMMITTING, streamStore.transactionStatus(SCOPE, STREAM, tx1, null, executor).join());
// Abort the second one
Assert.assertEquals(TxnStatus.ABORTING, txnTasks.abortTxn(SCOPE, STREAM, tx2, tx2Version, null).join());
// Ensure that transaction state is ABORTING.
assertEquals(TxnStatus.ABORTING, streamStore.transactionStatus(SCOPE, STREAM, tx2, null, executor).join());
// Ensure that commit (resp. abort) transaction tasks are idempotent
// when transaction is in COMMITTING state (resp. ABORTING state).
assertEquals(TxnStatus.COMMITTING, txnTasks.commitTxn(SCOPE, STREAM, tx1, null).join());
assertEquals(TxnStatus.ABORTING, txnTasks.abortTxn(SCOPE, STREAM, tx2, null, null).join());
// Create commit and abort event processors.
ConnectionFactory connectionFactory = Mockito.mock(ConnectionFactory.class);
BlockingQueue<CommitEvent> processedCommitEvents = new LinkedBlockingQueue<>();
BlockingQueue<AbortEvent> processedAbortEvents = new LinkedBlockingQueue<>();
createEventProcessor("commitRG", "commitStream", commitReader, commitWriter, () -> new CommitEventProcessor(streamStore, streamMetadataTasks, hostStore, executor, segmentHelperMock, connectionFactory, processedCommitEvents));
createEventProcessor("abortRG", "abortStream", abortReader, abortWriter, () -> new ConcurrentEventProcessor<>(new AbortRequestHandler(streamStore, streamMetadataTasks, hostStore, executor, segmentHelperMock, connectionFactory, processedAbortEvents), executor));
// Wait until the commit event is processed and ensure that the txn state is COMMITTED.
CommitEvent commitEvent = processedCommitEvents.take();
assertEquals(tx1, commitEvent.getTxid());
assertEquals(TxnStatus.COMMITTED, streamStore.transactionStatus(SCOPE, STREAM, tx1, null, executor).join());
// Wait until the abort event is processed and ensure that the txn state is ABORTED.
AbortEvent abortEvent = processedAbortEvents.take();
assertEquals(tx2, abortEvent.getTxid());
assertEquals(TxnStatus.ABORTED, streamStore.transactionStatus(SCOPE, STREAM, tx2, null, executor).join());
// Ensure that commit (resp. abort) transaction tasks are idempotent
// even after transaction is committed (resp. aborted)
assertEquals(TxnStatus.COMMITTED, txnTasks.commitTxn(SCOPE, STREAM, tx1, null).join());
assertEquals(TxnStatus.ABORTED, txnTasks.abortTxn(SCOPE, STREAM, tx2, null, null).join());
}
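The sequence above condenses into the following sketch of the idempotency contract the test verifies. It reuses txnTasks, SCOPE, STREAM and tx1 from the test body and is illustrative only, not additional test code.
// Committing an already-COMMITTING transaction simply reports its current state,
// so a controller retry after a partial failure cannot break the transaction.
TxnStatus firstCall = txnTasks.commitTxn(SCOPE, STREAM, tx1, null).join();   // COMMITTING
TxnStatus retryCall = txnTasks.commitTxn(SCOPE, STREAM, tx1, null).join();   // still COMMITTING
// ... after the commit event is processed by the CommitEventProcessor ...
TxnStatus lateCall = txnTasks.commitTxn(SCOPE, STREAM, tx1, null).join();    // COMMITTED
// abortTxn behaves symmetrically, returning ABORTING and then ABORTED on retries.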
Use of io.pravega.client.stream.StreamConfiguration in project pravega by pravega.
Class TaskTest, method testStreamTaskSweeping.
@Test(timeout = 10000)
public void testStreamTaskSweeping() {
final String stream = "testPartialCreationStream";
final String deadHost = "deadHost";
final int initialSegments = 2;
final ScalingPolicy policy1 = ScalingPolicy.fixed(initialSegments);
final StreamConfiguration configuration1 = StreamConfiguration.builder().scope(SCOPE).streamName(stream1).scalingPolicy(policy1).build();
final ArrayList<Integer> sealSegments = new ArrayList<>();
sealSegments.add(0);
final ArrayList<AbstractMap.SimpleEntry<Double, Double>> newRanges = new ArrayList<>();
newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 0.25));
newRanges.add(new AbstractMap.SimpleEntry<>(0.25, 0.5));
// Create objects.
StreamMetadataTasks mockStreamTasks = new StreamMetadataTasks(streamStore, hostStore, taskMetadataStore,
        segmentHelperMock, executor, deadHost, Mockito.mock(ConnectionFactory.class), false, "");
mockStreamTasks.setCreateIndexOnlyMode();
TaskSweeper sweeper = new TaskSweeper(taskMetadataStore, HOSTNAME, executor, streamMetadataTasks);
// Create stream test.
completePartialTask(mockStreamTasks.createStream(SCOPE, stream, configuration1, System.currentTimeMillis()), deadHost, sweeper);
Assert.assertEquals(initialSegments, streamStore.getActiveSegments(SCOPE, stream, null, executor).join().size());
List<StreamConfiguration> streams = streamStore.listStreamsInScope(SCOPE).join();
Assert.assertTrue(streams.stream().allMatch(x -> !x.getStreamName().equals(stream)));
}
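A small readability note on the final assertion: the same check can be written with noneMatch, which states the intent (the partially created stream must not appear in the scope's stream list) more directly. This is just an equivalent form of the allMatch line above.
// Equivalent to Assert.assertTrue(streams.stream().allMatch(x -> !x.getStreamName().equals(stream)));
Assert.assertTrue(streams.stream().noneMatch(x -> x.getStreamName().equals(stream)));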
Use of io.pravega.client.stream.StreamConfiguration in project pravega by pravega.
Class ModelHelperTest, method testGetCreateStreamConfig.
@Test
public void testGetCreateStreamConfig() {
ScalingConfig scalingConfig = new ScalingConfig();
scalingConfig.setType(ScalingConfig.TypeEnum.FIXED_NUM_SEGMENTS);
scalingConfig.setMinSegments(2);
CreateStreamRequest createStreamRequest = new CreateStreamRequest();
createStreamRequest.setStreamName("stream");
createStreamRequest.setScalingPolicy(scalingConfig);
StreamConfiguration streamConfig = getCreateStreamConfig(createStreamRequest, "scope");
Assert.assertEquals(ScalingPolicy.ScaleType.FIXED_NUM_SEGMENTS, streamConfig.getScalingPolicy().getScaleType());
Assert.assertEquals(2, streamConfig.getScalingPolicy().getMinNumSegments());
Assert.assertNull(streamConfig.getRetentionPolicy());
scalingConfig.setType(ScalingConfig.TypeEnum.BY_RATE_IN_EVENTS_PER_SEC);
scalingConfig.setTargetRate(123);
scalingConfig.setScaleFactor(2);
RetentionConfig retentionConfig = new RetentionConfig();
retentionConfig.setType(RetentionConfig.TypeEnum.LIMITED_DAYS);
retentionConfig.setValue(1234L);
createStreamRequest.setStreamName("stream");
createStreamRequest.setScalingPolicy(scalingConfig);
createStreamRequest.setRetentionPolicy(retentionConfig);
streamConfig = getCreateStreamConfig(createStreamRequest, "scope");
Assert.assertEquals(ScalingPolicy.ScaleType.BY_RATE_IN_EVENTS_PER_SEC, streamConfig.getScalingPolicy().getScaleType());
Assert.assertEquals("scope", streamConfig.getScope());
Assert.assertEquals("stream", streamConfig.getStreamName());
Assert.assertEquals(2, streamConfig.getScalingPolicy().getScaleFactor());
Assert.assertEquals(123, streamConfig.getScalingPolicy().getTargetRate());
Assert.assertEquals(RetentionPolicy.RetentionType.TIME, streamConfig.getRetentionPolicy().getRetentionType());
Assert.assertEquals(Duration.ofDays(1234L).toMillis(), streamConfig.getRetentionPolicy().getRetentionParam());
scalingConfig.setType(ScalingConfig.TypeEnum.BY_RATE_IN_KBYTES_PER_SEC);
scalingConfig.setTargetRate(1234);
scalingConfig.setScaleFactor(23);
retentionConfig.setType(RetentionConfig.TypeEnum.LIMITED_SIZE_MB);
retentionConfig.setValue(12345L);
createStreamRequest.setStreamName("stream");
createStreamRequest.setScalingPolicy(scalingConfig);
createStreamRequest.setRetentionPolicy(retentionConfig);
streamConfig = getCreateStreamConfig(createStreamRequest, "scope");
Assert.assertEquals(ScalingPolicy.ScaleType.BY_RATE_IN_KBYTES_PER_SEC, streamConfig.getScalingPolicy().getScaleType());
Assert.assertEquals(23, streamConfig.getScalingPolicy().getScaleFactor());
Assert.assertEquals(1234, streamConfig.getScalingPolicy().getTargetRate());
Assert.assertEquals(RetentionPolicy.RetentionType.SIZE, streamConfig.getRetentionPolicy().getRetentionType());
Assert.assertEquals(12345L * 1024 * 1024, streamConfig.getRetentionPolicy().getRetentionParam());
}
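The expected RetentionConfig-to-RetentionPolicy translation can be read directly off the assertions above; the sketch below only restates those expected values (the actual conversion is performed inside getCreateStreamConfig).
// LIMITED_DAYS with value 1234 is expected to become a TIME policy whose
// retentionParam is the day count converted to milliseconds.
long expectedTimeParam = Duration.ofDays(1234L).toMillis();   // 106_617_600_000 ms
// LIMITED_SIZE_MB with value 12345 is expected to become a SIZE policy whose
// retentionParam is the megabyte count converted to bytes.
long expectedSizeParam = 12345L * 1024 * 1024;                // 12_944_670_720 bytes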
Use of io.pravega.client.stream.StreamConfiguration in project pravega by pravega.
Class ModelHelperTest, method testGetUpdateStreamConfig.
@Test
public void testGetUpdateStreamConfig() {
ScalingConfig scalingConfig = new ScalingConfig();
scalingConfig.setType(ScalingConfig.TypeEnum.FIXED_NUM_SEGMENTS);
scalingConfig.setMinSegments(2);
UpdateStreamRequest updateStreamRequest = new UpdateStreamRequest();
updateStreamRequest.setScalingPolicy(scalingConfig);
StreamConfiguration streamConfig = getUpdateStreamConfig(updateStreamRequest, "scope", "stream");
Assert.assertEquals(ScalingPolicy.ScaleType.FIXED_NUM_SEGMENTS, streamConfig.getScalingPolicy().getScaleType());
Assert.assertEquals(2, streamConfig.getScalingPolicy().getMinNumSegments());
Assert.assertNull(streamConfig.getRetentionPolicy());
scalingConfig.setType(ScalingConfig.TypeEnum.BY_RATE_IN_EVENTS_PER_SEC);
scalingConfig.setTargetRate(123);
scalingConfig.setScaleFactor(2);
RetentionConfig retentionConfig = new RetentionConfig();
retentionConfig.setType(RetentionConfig.TypeEnum.LIMITED_DAYS);
retentionConfig.setValue(1234L);
updateStreamRequest.setScalingPolicy(scalingConfig);
updateStreamRequest.setRetentionPolicy(retentionConfig);
streamConfig = getUpdateStreamConfig(updateStreamRequest, "scope", "stream");
Assert.assertEquals("scope", streamConfig.getScope());
Assert.assertEquals("stream", streamConfig.getStreamName());
Assert.assertEquals(ScalingPolicy.ScaleType.BY_RATE_IN_EVENTS_PER_SEC, streamConfig.getScalingPolicy().getScaleType());
Assert.assertEquals(2, streamConfig.getScalingPolicy().getScaleFactor());
Assert.assertEquals(123, streamConfig.getScalingPolicy().getTargetRate());
Assert.assertEquals(RetentionPolicy.RetentionType.TIME, streamConfig.getRetentionPolicy().getRetentionType());
Assert.assertEquals(Duration.ofDays(1234L).toMillis(), streamConfig.getRetentionPolicy().getRetentionParam());
scalingConfig.setType(ScalingConfig.TypeEnum.BY_RATE_IN_KBYTES_PER_SEC);
scalingConfig.setTargetRate(1234);
scalingConfig.setScaleFactor(23);
retentionConfig.setType(RetentionConfig.TypeEnum.LIMITED_SIZE_MB);
retentionConfig.setValue(12345L);
updateStreamRequest.setScalingPolicy(scalingConfig);
updateStreamRequest.setRetentionPolicy(retentionConfig);
streamConfig = getUpdateStreamConfig(updateStreamRequest, "scope", "stream");
Assert.assertEquals(ScalingPolicy.ScaleType.BY_RATE_IN_KBYTES_PER_SEC, streamConfig.getScalingPolicy().getScaleType());
Assert.assertEquals(23, streamConfig.getScalingPolicy().getScaleFactor());
Assert.assertEquals(1234, streamConfig.getScalingPolicy().getTargetRate());
Assert.assertEquals(RetentionPolicy.RetentionType.SIZE, streamConfig.getRetentionPolicy().getRetentionType());
Assert.assertEquals(12345L * 1024 * 1024, streamConfig.getRetentionPolicy().getRetentionParam());
}
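For comparison, the BY_RATE_IN_EVENTS_PER_SEC / LIMITED_DAYS case above corresponds to a configuration that could also be built directly with the client-side builders. The sketch only combines factory methods that already appear in these examples (ScalingPolicy.byEventRate with targetRate, scaleFactor and minimum segment count, and RetentionPolicy.byTime), so treat the exact argument order as taken from those usages rather than verified against the API docs.
// Equivalent client-side construction of the configuration the update request should yield.
StreamConfiguration expected = StreamConfiguration.builder()
        .scope("scope")
        .streamName("stream")
        .scalingPolicy(ScalingPolicy.byEventRate(123, 2, 2))             // targetRate, scaleFactor, minSegments
        .retentionPolicy(RetentionPolicy.byTime(Duration.ofDays(1234L)))
        .build();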
Use of io.pravega.client.stream.StreamConfiguration in project pravega by pravega.
Class StreamMetaDataTests, method testListStreams.
/**
* Test for listStreams REST API.
*
* @throws ExecutionException
* @throws InterruptedException
*/
@Test
public void testListStreams() throws ExecutionException, InterruptedException {
final String resourceURI = getURI() + "v1/scopes/scope1/streams";
final StreamConfiguration streamConfiguration1 = StreamConfiguration.builder()
        .scope(scope1)
        .streamName(stream1)
        .scalingPolicy(ScalingPolicy.byEventRate(100, 2, 2))
        .retentionPolicy(RetentionPolicy.byTime(Duration.ofMillis(123L)))
        .build();
final StreamConfiguration streamConfiguration2 = StreamConfiguration.builder()
        .scope(scope1)
        .streamName(stream2)
        .scalingPolicy(ScalingPolicy.byEventRate(100, 2, 2))
        .retentionPolicy(RetentionPolicy.byTime(Duration.ofMillis(123L)))
        .build();
// Test to list streams.
List<StreamConfiguration> streamsList = Arrays.asList(streamConfiguration1, streamConfiguration2);
when(mockControllerService.listStreamsInScope("scope1")).thenReturn(CompletableFuture.completedFuture(streamsList));
Response response = addAuthHeaders(client.target(resourceURI).request()).buildGet().invoke();
assertEquals("List Streams response code", 200, response.getStatus());
assertTrue(response.bufferEntity());
final StreamsList streamsList1 = response.readEntity(StreamsList.class);
assertEquals("List count", streamsList1.getStreams().size(), 2);
assertEquals("List element", streamsList1.getStreams().get(0).getStreamName(), "stream1");
assertEquals("List element", streamsList1.getStreams().get(1).getStreamName(), "stream2");
response.close();
// Test for list streams for invalid scope.
final CompletableFuture<List<StreamConfiguration>> completableFuture1 = new CompletableFuture<>();
completableFuture1.completeExceptionally(StoreException.create(StoreException.Type.DATA_NOT_FOUND, "scope1"));
when(mockControllerService.listStreamsInScope("scope1")).thenReturn(completableFuture1);
response = addAuthHeaders(client.target(resourceURI).request()).buildGet().invoke();
assertEquals("List Streams response code", 404, response.getStatus());
response.close();
// Test for list streams failure.
final CompletableFuture<List<StreamConfiguration>> completableFuture = new CompletableFuture<>();
completableFuture.completeExceptionally(new Exception());
when(mockControllerService.listStreamsInScope("scope1")).thenReturn(completableFuture);
response = addAuthHeaders(client.target(resourceURI).request()).buildGet().invoke();
assertEquals("List Streams response code", 500, response.getStatus());
response.close();
// Test for filtering streams.
final StreamConfiguration streamConfiguration3 = StreamConfiguration.builder()
        .scope(scope1)
        .streamName(NameUtils.getInternalNameForStream("stream3"))
        .scalingPolicy(ScalingPolicy.fixed(1))
        .build();
List<StreamConfiguration> allStreamsList = Arrays.asList(streamConfiguration1, streamConfiguration2, streamConfiguration3);
when(mockControllerService.listStreamsInScope("scope1")).thenReturn(CompletableFuture.completedFuture(allStreamsList));
response = addAuthHeaders(client.target(resourceURI).request()).buildGet().invoke();
assertEquals("List Streams response code", 200, response.getStatus());
assertTrue(response.bufferEntity());
StreamsList streamsListResp = response.readEntity(StreamsList.class);
assertEquals("List count", 2, streamsListResp.getStreams().size());
assertEquals("List element", "stream1", streamsListResp.getStreams().get(0).getStreamName());
assertEquals("List element", "stream2", streamsListResp.getStreams().get(1).getStreamName());
response.close();
response = addAuthHeaders(client.target(resourceURI).queryParam("showInternalStreams", "true").request()).buildGet().invoke();
assertEquals("List Streams response code", 200, response.getStatus());
assertTrue(response.bufferEntity());
streamsListResp = response.readEntity(StreamsList.class);
assertEquals("List count", 1, streamsListResp.getStreams().size());
assertEquals("List element", NameUtils.getInternalNameForStream("stream3"), streamsListResp.getStreams().get(0).getStreamName());
response.close();
// Test to list large number of streams.
streamsList = Collections.nCopies(50000, streamConfiguration1);
when(mockControllerService.listStreamsInScope("scope1")).thenReturn(CompletableFuture.completedFuture(streamsList));
response = addAuthHeaders(client.target(resourceURI).request()).buildGet().invoke();
assertEquals("List Streams response code", 200, response.getStatus());
assertTrue(response.bufferEntity());
final StreamsList streamsList2 = response.readEntity(StreamsList.class);
assertEquals("List count", 50000, streamsList2.getStreams().size());
response.close();
}
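The filtering behaviour asserted above boils down to a simple visibility rule: without the showInternalStreams query parameter only user streams are returned, and with showInternalStreams=true only internal streams are. The predicate below is a reconstruction of that rule from the test data, reusing allStreamsList and the internal name of stream3; it is not the REST handler's actual implementation.
// Illustrative reconstruction of the visibility rule the assertions imply.
String internalName = NameUtils.getInternalNameForStream("stream3");
boolean showInternal = false;   // corresponds to the showInternalStreams query parameter
List<StreamConfiguration> visible = allStreamsList.stream()
        .filter(cfg -> cfg.getStreamName().equals(internalName) == showInternal)
        .collect(Collectors.toList());
// showInternal == false -> stream1 and stream2 (the 2-element response above)
// showInternal == true  -> only the internal stream3 (the 1-element response above)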