Use of org.apache.samza.system.descriptors.DelegatingSystemDescriptor in project samza by apache.
From class TestCouchbaseRemoteTableEndToEnd, method testEndToEnd:
@Test
public void testEndToEnd() {
  Bucket inputBucket = cluster.openBucket(inputBucketName);
  inputBucket.upsert(ByteArrayDocument.create("Alice", "20".getBytes()));
  inputBucket.upsert(ByteArrayDocument.create("Bob", "30".getBytes()));
  inputBucket.upsert(ByteArrayDocument.create("Chris", "40".getBytes()));
  inputBucket.upsert(ByteArrayDocument.create("David", "50".getBytes()));
  inputBucket.close();
  List<String> users = Arrays.asList("Alice", "Bob", "Chris", "David");

  final StreamApplication app = appDesc -> {
    DelegatingSystemDescriptor inputSystemDescriptor = new DelegatingSystemDescriptor("test");
    GenericInputDescriptor<String> inputDescriptor =
        inputSystemDescriptor.getInputDescriptor("User", new NoOpSerde<>());
    CouchbaseTableReadFunction<String> readFunction =
        new CouchbaseTableReadFunction<>(inputBucketName, String.class, "couchbase://127.0.0.1")
            .withBootstrapCarrierDirectPort(couchbaseMock.getCarrierPort(inputBucketName))
            .withBootstrapHttpDirectPort(couchbaseMock.getHttpPort())
            .withSerde(new StringSerde());
    CouchbaseTableWriteFunction<JsonObject> writeFunction =
        new CouchbaseTableWriteFunction<>(outputBucketName, JsonObject.class, "couchbase://127.0.0.1")
            .withBootstrapCarrierDirectPort(couchbaseMock.getCarrierPort(outputBucketName))
            .withBootstrapHttpDirectPort(couchbaseMock.getHttpPort());
    RemoteTableDescriptor inputTableDesc =
        new RemoteTableDescriptor<String, String, Void>("input-table")
            .withReadFunction(readFunction)
            .withRateLimiterDisabled();
    Table<KV<String, String>> inputTable = appDesc.getTable(inputTableDesc);
    RemoteTableDescriptor outputTableDesc =
        new RemoteTableDescriptor<String, JsonObject, Object>("output-table")
            .withReadFunction(new NoOpTableReadFunction<>())
            .withWriteFunction(writeFunction)
            .withRateLimiterDisabled();
    Table<KV<String, JsonObject>> outputTable = appDesc.getTable(outputTableDesc);
    appDesc.getInputStream(inputDescriptor)
        .map(k -> KV.of(k, k))
        .join(inputTable, new JoinFunction())
        .sendTo(outputTable);
  };

  InMemorySystemDescriptor isd = new InMemorySystemDescriptor("test");
  InMemoryInputDescriptor<String> inputDescriptor = isd.getInputDescriptor("User", new NoOpSerde<>());
  TestRunner.of(app).addInputStream(inputDescriptor, users).run(Duration.ofSeconds(10));

  Bucket outputBucket = cluster.openBucket(outputBucketName);
  Assert.assertEquals("{\"name\":\"Alice\",\"age\":\"20\"}", outputBucket.get("Alice").content().toString());
  Assert.assertEquals("{\"name\":\"Bob\",\"age\":\"30\"}", outputBucket.get("Bob").content().toString());
  Assert.assertEquals("{\"name\":\"Chris\",\"age\":\"40\"}", outputBucket.get("Chris").content().toString());
  Assert.assertEquals("{\"name\":\"David\",\"age\":\"50\"}", outputBucket.get("David").content().toString());
  outputBucket.close();
}
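The JoinFunction used in the pipeline above is an inner class of the test and is not included in this excerpt. The following is only a hypothetical sketch consistent with the asserted output (the stream carries the user name as both key and value, the table lookup returns the age); the exact class body and generics are assumptions, not the project's code. It uses org.apache.samza.operators.functions.StreamTableJoinFunction and the Couchbase 2.x JsonObject.

  // Hypothetical sketch (assumed, not the project's actual inner class):
  // joins a KV<name, name> message with a KV<name, age> table record into KV<name, JsonObject>.
  static class JoinFunction
      implements StreamTableJoinFunction<String, KV<String, String>, KV<String, String>, KV<String, JsonObject>> {

    @Override
    public KV<String, JsonObject> apply(KV<String, String> message, KV<String, String> record) {
      // Produces output matching the assertions, e.g. {"name":"Alice","age":"20"}
      return KV.of(message.getKey(),
          JsonObject.create().put("name", message.getKey()).put("age", record.getValue()));
    }

    @Override
    public String getMessageKey(KV<String, String> message) {
      return message.getKey();
    }

    @Override
    public String getRecordKey(KV<String, String> record) {
      return record.getKey();
    }
  }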
Use of org.apache.samza.system.descriptors.DelegatingSystemDescriptor in project samza by apache.
From class TestRemoteTableWithBatchEndToEnd, method doTestStreamTableJoinRemoteTable:
private void doTestStreamTableJoinRemoteTable(String testName, boolean batchRead, boolean batchWrite) throws Exception {
  final InMemoryWriteFunction writer = new InMemoryWriteFunction(testName);
  BATCH_READS.put(testName, new AtomicInteger());
  BATCH_WRITES.put(testName, new AtomicInteger());
  WRITTEN_RECORDS.put(testName, new HashMap<>());
  int count = 16;
  int batchSize = 4;
  String profiles = Base64Serializer.serialize(generateProfiles(count));
  final RateLimiter readRateLimiter = mock(RateLimiter.class, withSettings().serializable());
  final RateLimiter writeRateLimiter = mock(RateLimiter.class, withSettings().serializable());
  final TableRateLimiter.CreditFunction creditFunction = (k, v, args) -> 1;

  final StreamApplication app = appDesc -> {
    RemoteTableDescriptor<Integer, Profile, Void> inputTableDesc = new RemoteTableDescriptor<>("profile-table-1");
    inputTableDesc
        .withReadFunction(InMemoryReadFunction.getInMemoryReadFunction(testName, profiles))
        .withRateLimiter(readRateLimiter, creditFunction, null);
    if (batchRead) {
      inputTableDesc.withBatchProvider(
          new CompactBatchProvider().withMaxBatchSize(batchSize).withMaxBatchDelay(Duration.ofHours(1)));
    }
    // dummy reader
    TableReadFunction readFn = new MyReadFunction();
    RemoteTableDescriptor<Integer, EnrichedPageView, EnrichedPageView> outputTableDesc =
        new RemoteTableDescriptor<>("enriched-page-view-table-1");
    outputTableDesc
        .withReadFunction(readFn)
        .withWriteFunction(writer)
        .withRateLimiter(writeRateLimiter, creditFunction, creditFunction);
    if (batchWrite) {
      outputTableDesc.withBatchProvider(
          new CompactBatchProvider().withMaxBatchSize(batchSize).withMaxBatchDelay(Duration.ofHours(1)));
    }
    Table outputTable = appDesc.getTable(outputTableDesc);
    Table<KV<Integer, Profile>> inputTable = appDesc.getTable(inputTableDesc);
    DelegatingSystemDescriptor ksd = new DelegatingSystemDescriptor("test");
    GenericInputDescriptor<PageView> isd = ksd.getInputDescriptor("PageView", new NoOpSerde<>());
    appDesc.getInputStream(isd)
        .map(pv -> new KV<>(pv.getMemberId(), pv))
        .join(inputTable, new PageViewToProfileJoinFunction())
        .map(m -> new KV<>(m.getMemberId(), m))
        .sendTo(outputTable);
  };

  InMemorySystemDescriptor isd = new InMemorySystemDescriptor("test");
  InMemoryInputDescriptor<PageView> inputDescriptor = isd.getInputDescriptor("PageView", new NoOpSerde<>());
  TestRunner.of(app)
      .addInputStream(inputDescriptor, Arrays.asList(generatePageViewsWithDistinctKeys(count)))
      .addConfig("task.max.concurrency", String.valueOf(count))
      .addConfig("task.async.commit", String.valueOf(true))
      .run(Duration.ofSeconds(10));

  Assert.assertEquals(count, WRITTEN_RECORDS.get(testName).size());
  Assert.assertNotNull(WRITTEN_RECORDS.get(testName).get(0));
  if (batchWrite) {
    Assert.assertEquals(count / batchSize, BATCH_WRITES.get(testName).get());
  }
}
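The "dummy reader" MyReadFunction is only there because a RemoteTableDescriptor requires a read function even when the table is used write-only; its body is not part of this excerpt. A plausible no-op sketch is shown below; the class body is an assumption, not the project's code, and it relies on org.apache.samza.table.remote.TableReadFunction and java.util.concurrent.CompletableFuture.

  // Hypothetical placeholder read function (assumed, not the project's code):
  // the output table is only written to in this test, so reads are never exercised.
  static class MyReadFunction implements TableReadFunction {
    @Override
    public CompletableFuture getAsync(Object key) {
      // Never called in this test; return an already-completed empty result.
      return CompletableFuture.completedFuture(null);
    }

    @Override
    public boolean isRetriable(Throwable exception) {
      return false;
    }
  }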
Use of org.apache.samza.system.descriptors.DelegatingSystemDescriptor in project samza by apache.
From class WatermarkIntegrationTest, method testWatermark:
@Test
public void testWatermark() throws Exception {
  Map<String, String> configs = new HashMap<>();
  configs.put(ApplicationConfig.APP_RUNNER_CLASS, MockLocalApplicationRunner.class.getName());
  configs.put("systems.test.samza.factory", TestSystemFactory.class.getName());
  configs.put("streams.PageView.samza.system", "test");
  configs.put("streams.PageView.partitionCount", String.valueOf(PARTITION_COUNT));
  configs.put(JobConfig.JOB_NAME, "test-watermark-job");
  configs.put(JobConfig.PROCESSOR_ID, "1");
  configs.put(JobCoordinatorConfig.JOB_COORDINATOR_FACTORY, PassthroughJobCoordinatorFactory.class.getName());
  configs.put(TaskConfig.GROUPER_FACTORY, SingleContainerGrouperFactory.class.getName());
  configs.put("systems.kafka.samza.factory", "org.apache.samza.system.kafka.KafkaSystemFactory");
  configs.put("systems.kafka.producer.bootstrap.servers", bootstrapUrl());
  configs.put("systems.kafka.consumer.zookeeper.connect", zkConnect());
  configs.put("systems.kafka.samza.key.serde", "int");
  configs.put("systems.kafka.samza.msg.serde", "json");
  configs.put("systems.kafka.default.stream.replication.factor", "1");
  configs.put("job.default.system", "kafka");
  configs.put("serializers.registry.int.class", IntegerSerdeFactory.class.getName());
  configs.put("serializers.registry.string.class", StringSerdeFactory.class.getName());
  configs.put("serializers.registry.json.class", PageViewJsonSerdeFactory.class.getName());

  List<PageView> received = new ArrayList<>();
  class TestStreamApp implements StreamApplication {

    @Override
    public void describe(StreamApplicationDescriptor appDescriptor) {
      DelegatingSystemDescriptor sd = new DelegatingSystemDescriptor("test");
      GenericInputDescriptor<KV<String, PageView>> isd =
          sd.getInputDescriptor("PageView", KVSerde.of(new NoOpSerde<>(), new NoOpSerde<>()));
      appDescriptor.getInputStream(isd)
          .map(KV::getValue)
          .partitionBy(pv -> pv.getMemberId(), pv -> pv, KVSerde.of(new NoOpSerde<>(), new NoOpSerde<>()), "p1")
          .sink((m, collector, coordinator) -> {
            received.add(m.getValue());
          });
    }
  }

  Config config = new MapConfig(configs);
  final ApplicationRunner runner = ApplicationRunners.getApplicationRunner(new TestStreamApp(), config);
  executeRun(runner, config);
  // processors are only available when the app is running
  Map<String, StreamOperatorTask> tasks = getTaskOperationGraphs((MockLocalApplicationRunner) runner);
  runner.waitForFinish();
  // wait for the completion to ensure that all tasks are actually initialized and the OperatorImplGraph is initialized
  StreamOperatorTask task0 = tasks.get("Partition 0");
  OperatorImplGraph graph = TestStreamOperatorTask.getOperatorImplGraph(task0);
  OperatorImpl pb = getOperator(graph, OperatorSpec.OpCode.PARTITION_BY);
  assertEquals(TestOperatorImpl.getInputWatermark(pb), 4);
  assertEquals(TestOperatorImpl.getOutputWatermark(pb), 4);
  OperatorImpl sink = getOperator(graph, OperatorSpec.OpCode.SINK);
  assertEquals(TestOperatorImpl.getInputWatermark(sink), 3);
  assertEquals(TestOperatorImpl.getOutputWatermark(sink), 3);

  StreamOperatorTask task1 = tasks.get("Partition 1");
  graph = TestStreamOperatorTask.getOperatorImplGraph(task1);
  pb = getOperator(graph, OperatorSpec.OpCode.PARTITION_BY);
  assertEquals(TestOperatorImpl.getInputWatermark(pb), 3);
  assertEquals(TestOperatorImpl.getOutputWatermark(pb), 3);
  sink = getOperator(graph, OperatorSpec.OpCode.SINK);
  assertEquals(TestOperatorImpl.getInputWatermark(sink), 3);
  assertEquals(TestOperatorImpl.getOutputWatermark(sink), 3);
}
Use of org.apache.samza.system.descriptors.DelegatingSystemDescriptor in project samza by apache.
From class TestRemoteTableEndToEnd, method testSendToUpdatesWithoutUpdateOptions:
// Test will fail as we use sendTo with a KV<K, UpdateMessage> stream without UpdateOptions
@Test(expected = SamzaException.class)
public void testSendToUpdatesWithoutUpdateOptions() throws Exception {
  // max member id for page views is 10
  final String profiles = Base64Serializer.serialize(generateProfiles(10));
  final RateLimiter readRateLimiter = mock(RateLimiter.class, withSettings().serializable());
  final TableRateLimiter.CreditFunction creditFunction = (k, v, args) -> 1;

  final StreamApplication app = appDesc -> {
    final RemoteTableDescriptor joinTableDesc =
        new RemoteTableDescriptor<Integer, TestTableData.Profile, Void>("profile-table-1")
            .withReadFunction(InMemoryProfileReadFunction.getInMemoryReadFunction(profiles))
            .withRateLimiter(readRateLimiter, creditFunction, null);
    final RemoteTableDescriptor outputTableDesc =
        new RemoteTableDescriptor<Integer, EnrichedPageView, EnrichedPageView>("enriched-page-view-table-1")
            .withReadFunction(new NoOpTableReadFunction<>())
            .withReadRateLimiterDisabled()
            .withWriteFunction(new InMemoryEnrichedPageViewWriteFunction2("testUpdateWithoutUpdateOptions", false))
            .withWriteRateLimit(1000);
    final Table<KV<Integer, Profile>> outputTable = appDesc.getTable(outputTableDesc);
    final Table<KV<Integer, Profile>> joinTable = appDesc.getTable(joinTableDesc);
    final DelegatingSystemDescriptor ksd = new DelegatingSystemDescriptor("test");
    final GenericInputDescriptor<PageView> isd = ksd.getInputDescriptor("PageView", new NoOpSerde<>());
    appDesc.getInputStream(isd)
        .map(pv -> new KV<>(pv.getMemberId(), pv))
        .join(joinTable, new PageViewToProfileJoinFunction())
        .map(m -> new KV(m.getMemberId(), UpdateMessage.of(m, m)))
        .sendTo(outputTable);
  };

  int numPageViews = 40;
  InMemorySystemDescriptor isd = new InMemorySystemDescriptor("test");
  InMemoryInputDescriptor<PageView> inputDescriptor = isd.getInputDescriptor("PageView", new NoOpSerde<>());
  TestRunner.of(app)
      .addInputStream(inputDescriptor, TestTableData.generatePartitionedPageViews(numPageViews, 4))
      .run(Duration.ofSeconds(10));
}
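As the leading comment notes, this test expects a SamzaException because an update stream (KV of key and UpdateMessage) is sent to the table without UpdateOptions. The non-failing form of the same pipeline simply supplies the options on sendTo, exactly as the next example does:

  // Supplying UpdateOptions makes the same sendTo call valid for an update stream:
  appDesc.getInputStream(isd)
      .map(pv -> new KV<>(pv.getMemberId(), pv))
      .join(joinTable, new PageViewToProfileJoinFunction())
      .map(m -> new KV(m.getMemberId(), UpdateMessage.of(m, m)))
      .sendTo(outputTable, UpdateOptions.UPDATE_WITH_DEFAULTS);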
Use of org.apache.samza.system.descriptors.DelegatingSystemDescriptor in project samza by apache.
From class TestRemoteTableEndToEnd, method testSendToUpdatesFailureAfterPutDefault:
// Test fails with the following exception:
// org.apache.samza.SamzaException: Update after Put default failed with exception.
@Test(expected = SamzaException.class)
public void testSendToUpdatesFailureAfterPutDefault() throws Exception {
  // the test checks for failure when update after put default fails
  String testName = "testSendToUpdatesFailureAfterPutDefault";
  final String profiles = Base64Serializer.serialize(generateProfiles(30));
  final RateLimiter readRateLimiter = mock(RateLimiter.class, withSettings().serializable());
  final TableRateLimiter.CreditFunction creditFunction = (k, v, args) -> 1;

  final StreamApplication app = appDesc -> {
    final RemoteTableDescriptor joinTableDesc =
        new RemoteTableDescriptor<Integer, TestTableData.Profile, Void>("profile-table-1")
            .withReadFunction(InMemoryProfileReadFunction.getInMemoryReadFunction(profiles))
            .withRateLimiter(readRateLimiter, creditFunction, null);
    final RemoteTableDescriptor outputTableDesc =
        new RemoteTableDescriptor<Integer, EnrichedPageView, EnrichedPageView>("enriched-page-view-table-1")
            .withReadFunction(new NoOpTableReadFunction<>())
            .withReadRateLimiterDisabled()
            .withWriteFunction(new InMemoryEnrichedPageViewWriteFunction2(testName, false, true))
            .withWriteRateLimit(1000);
    final Table<KV<Integer, Profile>> outputTable = appDesc.getTable(outputTableDesc);
    final Table<KV<Integer, Profile>> joinTable = appDesc.getTable(joinTableDesc);
    final DelegatingSystemDescriptor ksd = new DelegatingSystemDescriptor("test");
    final GenericInputDescriptor<PageView> isd = ksd.getInputDescriptor("PageView", new NoOpSerde<>());
    appDesc.getInputStream(isd)
        .map(pv -> new KV<>(pv.getMemberId(), pv))
        .join(joinTable, new PageViewToProfileJoinFunction())
        .map(m -> new KV(m.getMemberId(), UpdateMessage.of(m, m)))
        .sendTo(outputTable, UpdateOptions.UPDATE_WITH_DEFAULTS);
  };

  int numPageViews = 15;
  InMemorySystemDescriptor isd = new InMemorySystemDescriptor("test");
  InMemoryInputDescriptor<PageView> inputDescriptor = isd.getInputDescriptor("PageView", new NoOpSerde<>());
  Map<Integer, List<PageView>> integerListMap = TestTableData.generatePartitionedPageViews(numPageViews, 1);
  TestRunner.of(app)
      .addInputStream(inputDescriptor, integerListMap)
      .run(Duration.ofSeconds(10));
}