Use of org.apache.beam.sdk.expansion.service.ExpansionService in the apache/beam project.
From the class KafkaIOExternalTest, method testConstructKafkaReadWithoutMetadata.
@Test
public void testConstructKafkaReadWithoutMetadata() throws Exception {
  List<String> topics = ImmutableList.of("topic1", "topic2");
  String keyDeserializer = "org.apache.kafka.common.serialization.ByteArrayDeserializer";
  String valueDeserializer = "org.apache.kafka.common.serialization.LongDeserializer";
  ImmutableMap<String, String> consumerConfig =
      ImmutableMap.<String, String>builder()
          .put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "server1:port,server2:port")
          .put("key2", "value2")
          .put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializer)
          .put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializer)
          .build();
  Long startReadTime = 100L;
  ExternalTransforms.ExternalConfigurationPayload payload =
      encodeRow(
          Row.withSchema(
                  Schema.of(
                      Field.of("topics", FieldType.array(FieldType.STRING)),
                      Field.of("consumer_config", FieldType.map(FieldType.STRING, FieldType.STRING)),
                      Field.of("key_deserializer", FieldType.STRING),
                      Field.of("value_deserializer", FieldType.STRING),
                      Field.of("start_read_time", FieldType.INT64),
                      Field.of("commit_offset_in_finalize", FieldType.BOOLEAN),
                      Field.of("timestamp_policy", FieldType.STRING)))
              .withFieldValue("topics", topics)
              .withFieldValue("consumer_config", consumerConfig)
              .withFieldValue("key_deserializer", keyDeserializer)
              .withFieldValue("value_deserializer", valueDeserializer)
              .withFieldValue("start_read_time", startReadTime)
              .withFieldValue("commit_offset_in_finalize", false)
              .withFieldValue("timestamp_policy", "ProcessingTime")
              .build());
  RunnerApi.Components defaultInstance = RunnerApi.Components.getDefaultInstance();
  ExpansionApi.ExpansionRequest request =
      ExpansionApi.ExpansionRequest.newBuilder()
          .setComponents(defaultInstance)
          .setTransform(
              RunnerApi.PTransform.newBuilder()
                  .setUniqueName("test")
                  .setSpec(
                      RunnerApi.FunctionSpec.newBuilder()
                          .setUrn(org.apache.beam.sdk.io.kafka.KafkaIO.Read.External.URN_WITHOUT_METADATA)
                          .setPayload(payload.toByteString())))
          .setNamespace("test_namespace")
          .build();
  ExpansionService expansionService = new ExpansionService();
  TestStreamObserver<ExpansionApi.ExpansionResponse> observer = new TestStreamObserver<>();
  expansionService.expand(request, observer);
  ExpansionApi.ExpansionResponse result = observer.result;
  RunnerApi.PTransform transform = result.getTransform();
  assertThat(
      transform.getSubtransformsList(),
      Matchers.hasItem(MatchesPattern.matchesPattern(".*KafkaIO-Read.*")));
  assertThat(
      transform.getSubtransformsList(),
      Matchers.hasItem(MatchesPattern.matchesPattern(".*Remove-Kafka-Metadata.*")));
  assertThat(transform.getInputsCount(), Matchers.is(0));
  assertThat(transform.getOutputsCount(), Matchers.is(1));
  RunnerApi.PTransform kafkaReadComposite =
      result.getComponents().getTransformsOrThrow(transform.getSubtransforms(0));
  verifyKafkaReadComposite(
      result.getComponents().getTransformsOrThrow(kafkaReadComposite.getSubtransforms(0)), result);
}
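The encodeRow helper used to build each configuration payload is not part of this excerpt. Below is a minimal sketch of a plausible implementation, assuming Beam's SchemaCoder and SchemaTranslation APIs and protobuf's ByteString.Output; the schemaToProto(schema, true) signature is an assumption, so treat the body as illustrative rather than the actual Beam helper:

private static ExternalTransforms.ExternalConfigurationPayload encodeRow(Row row) {
  // Encode the configuration row using a coder derived from its own schema.
  ByteString.Output outputStream = ByteString.newOutput();
  try {
    SchemaCoder.of(row.getSchema()).encode(row, outputStream);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  // Ship the schema alongside the encoded row so the expansion service can
  // decode the configuration on its side.
  return ExternalTransforms.ExternalConfigurationPayload.newBuilder()
      .setSchema(SchemaTranslation.schemaToProto(row.getSchema(), true))
      .setPayload(outputStream.toByteString())
      .build();
}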
Use of org.apache.beam.sdk.expansion.service.ExpansionService in the apache/beam project.
From the class PubsubIOExternalTest, method testConstructPubsubRead.
@Test
public void testConstructPubsubRead() throws Exception {
  String topic = "projects/project-1234/topics/topic_name";
  String idAttribute = "id_foo";
  Boolean needsAttributes = true;
  ExternalTransforms.ExternalConfigurationPayload payload =
      encodeRow(
          Row.withSchema(
                  Schema.of(
                      Field.of("topic", FieldType.STRING),
                      Field.of("id_label", FieldType.STRING),
                      Field.of("with_attributes", FieldType.BOOLEAN)))
              .withFieldValue("topic", topic)
              .withFieldValue("id_label", idAttribute)
              .withFieldValue("with_attributes", needsAttributes)
              .build());
  RunnerApi.Components defaultInstance = RunnerApi.Components.getDefaultInstance();
  ExpansionApi.ExpansionRequest request =
      ExpansionApi.ExpansionRequest.newBuilder()
          .setComponents(defaultInstance)
          .setTransform(
              RunnerApi.PTransform.newBuilder()
                  .setUniqueName("test")
                  .setSpec(
                      RunnerApi.FunctionSpec.newBuilder()
                          .setUrn(ExternalRead.URN)
                          .setPayload(payload.toByteString())))
          .setNamespace("test_namespace")
          .build();
  ExpansionService expansionService = new ExpansionService();
  TestStreamObserver<ExpansionApi.ExpansionResponse> observer = new TestStreamObserver<>();
  expansionService.expand(request, observer);
  ExpansionApi.ExpansionResponse result = observer.result;
  RunnerApi.PTransform transform = result.getTransform();
  assertThat(
      transform.getSubtransformsList(),
      Matchers.hasItem(MatchesPattern.matchesPattern(".*PubsubUnboundedSource.*")));
  assertThat(
      transform.getSubtransformsList(),
      Matchers.hasItem(MatchesPattern.matchesPattern(".*MapElements.*")));
  assertThat(transform.getInputsCount(), Matchers.is(0));
  assertThat(transform.getOutputsCount(), Matchers.is(1));
}
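Each of these tests collects the expansion result through a TestStreamObserver, which is also not shown in this excerpt. A minimal sketch, assuming it is a plain gRPC io.grpc.stub.StreamObserver that records the last response (the real test helper may also assert on errors and completion):

private static class TestStreamObserver<T> implements StreamObserver<T> {
  T result;
  Throwable error;

  @Override
  public void onNext(T t) {
    // Keep the response so the test can assert on it after expand() returns.
    result = t;
  }

  @Override
  public void onError(Throwable throwable) {
    error = throwable;
  }

  @Override
  public void onCompleted() {}
}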
Use of org.apache.beam.sdk.expansion.service.ExpansionService in the apache/beam project.
From the class JobServerDriver, method createExpansionService.
@Nullable
private ExpansionServer createExpansionService() throws IOException {
  // Skip creating the expansion server if the configured port is negative.
  if (configuration.expansionPort < 0) {
    return null;
  }
  ExpansionServer expansionServer =
      ExpansionServer.create(new ExpansionService(), configuration.host, configuration.expansionPort);
  LOG.info(
      "Java ExpansionService started on {}:{}",
      expansionServer.getHost(),
      expansionServer.getPort());
  return expansionServer;
}
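For comparison with the embedded ExpansionServer above, the same ExpansionService can also be hosted on a plain gRPC server. A minimal sketch, assuming the standard grpc-java ServerBuilder API (io.grpc.Server, io.grpc.ServerBuilder); the port number is an arbitrary assumption, not something this excerpt specifies:

public static void main(String[] args) throws Exception {
  // ExpansionService is a gRPC BindableService, so it can be registered directly.
  Server server =
      ServerBuilder.forPort(8097) // port is an assumption, not from the excerpt
          .addService(new ExpansionService())
          .build()
          .start();
  server.awaitTermination();
}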
Use of org.apache.beam.sdk.expansion.service.ExpansionService in the apache/beam project.
From the class KafkaIOExternalTest, method testConstructKafkaWrite.
@Test
public void testConstructKafkaWrite() throws Exception {
  String topic = "topic";
  String keySerializer = "org.apache.kafka.common.serialization.ByteArraySerializer";
  String valueSerializer = "org.apache.kafka.common.serialization.LongSerializer";
  ImmutableMap<String, String> producerConfig =
      ImmutableMap.<String, String>builder()
          .put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "server1:port,server2:port")
          .put("retries", "3")
          .build();
  ExternalTransforms.ExternalConfigurationPayload payload =
      encodeRow(
          Row.withSchema(
                  Schema.of(
                      Field.of("topic", FieldType.STRING),
                      Field.of("producer_config", FieldType.map(FieldType.STRING, FieldType.STRING)),
                      Field.of("key_serializer", FieldType.STRING),
                      Field.of("value_serializer", FieldType.STRING)))
              .withFieldValue("topic", topic)
              .withFieldValue("producer_config", producerConfig)
              .withFieldValue("key_serializer", keySerializer)
              .withFieldValue("value_serializer", valueSerializer)
              .build());
  Pipeline p = Pipeline.create();
  p.apply(Impulse.create()).apply(WithKeys.of("key"));
  RunnerApi.Pipeline pipelineProto = PipelineTranslation.toProto(p);
  String inputPCollection =
      Iterables.getOnlyElement(
          Iterables.getLast(pipelineProto.getComponents().getTransformsMap().values())
              .getOutputsMap()
              .values());
  ExpansionApi.ExpansionRequest request =
      ExpansionApi.ExpansionRequest.newBuilder()
          .setComponents(pipelineProto.getComponents())
          .setTransform(
              RunnerApi.PTransform.newBuilder()
                  .setUniqueName("test")
                  .putInputs("input", inputPCollection)
                  .setSpec(
                      RunnerApi.FunctionSpec.newBuilder()
                          .setUrn(org.apache.beam.sdk.io.kafka.KafkaIO.Write.External.URN)
                          .setPayload(payload.toByteString())))
          .setNamespace("test_namespace")
          .build();
  ExpansionService expansionService = new ExpansionService();
  TestStreamObserver<ExpansionApi.ExpansionResponse> observer = new TestStreamObserver<>();
  expansionService.expand(request, observer);
  ExpansionApi.ExpansionResponse result = observer.result;
  RunnerApi.PTransform transform = result.getTransform();
  assertThat(
      transform.getSubtransformsList(),
      Matchers.hasItem(MatchesPattern.matchesPattern(".*Kafka-ProducerRecord.*")));
  assertThat(
      transform.getSubtransformsList(),
      Matchers.hasItem(MatchesPattern.matchesPattern(".*KafkaIO-WriteRecords.*")));
  assertThat(transform.getInputsCount(), Matchers.is(1));
  assertThat(transform.getOutputsCount(), Matchers.is(0));
  RunnerApi.PTransform writeComposite =
      result.getComponents().getTransformsOrThrow(transform.getSubtransforms(1));
  RunnerApi.PTransform writeParDo =
      result.getComponents()
          .getTransformsOrThrow(
              result.getComponents()
                  .getTransformsOrThrow(writeComposite.getSubtransforms(0))
                  .getSubtransforms(0));
  RunnerApi.ParDoPayload parDoPayload =
      RunnerApi.ParDoPayload.parseFrom(writeParDo.getSpec().getPayload());
  DoFn kafkaWriter = ParDoTranslation.getDoFn(parDoPayload);
  assertThat(kafkaWriter, Matchers.instanceOf(KafkaWriter.class));
  KafkaIO.WriteRecords spec =
      (KafkaIO.WriteRecords) Whitebox.getInternalState(kafkaWriter, "spec");
  assertThat(spec.getProducerConfig(), Matchers.is(producerConfig));
  assertThat(spec.getTopic(), Matchers.is(topic));
  assertThat(spec.getKeySerializer().getName(), Matchers.is(keySerializer));
  assertThat(spec.getValueSerializer().getName(), Matchers.is(valueSerializer));
}
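The Whitebox.getInternalState call above comes from PowerMock (org.powermock.reflect.Whitebox) and reads a private field reflectively. For readers who want to avoid that dependency, here is a plain-reflection equivalent; the helper name getInternalState is mirrored for clarity and is not taken from the excerpt:

static Object getInternalState(Object target, String fieldName)
    throws NoSuchFieldException, IllegalAccessException {
  // Walk the class hierarchy like Whitebox does, since the field may be
  // declared on a superclass of the runtime type.
  for (Class<?> c = target.getClass(); c != null; c = c.getSuperclass()) {
    try {
      java.lang.reflect.Field field = c.getDeclaredField(fieldName);
      field.setAccessible(true);
      return field.get(target);
    } catch (NoSuchFieldException ignored) {
      // Not declared on this class; keep looking up the hierarchy.
    }
  }
  throw new NoSuchFieldException(fieldName);
}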
Use of org.apache.beam.sdk.expansion.service.ExpansionService in the apache/beam project.
From the class KafkaIOExternalTest, method testConstructKafkaRead.
@Test
public void testConstructKafkaRead() throws Exception {
  List<String> topics = ImmutableList.of("topic1", "topic2");
  String keyDeserializer = "org.apache.kafka.common.serialization.ByteArrayDeserializer";
  String valueDeserializer = "org.apache.kafka.common.serialization.ByteArrayDeserializer";
  ImmutableMap<String, String> consumerConfig =
      ImmutableMap.<String, String>builder()
          .put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "server1:port,server2:port")
          .put("key2", "value2")
          .put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializer)
          .put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializer)
          .build();
  Long startReadTime = 100L;
  ExternalTransforms.ExternalConfigurationPayload payload =
      encodeRow(
          Row.withSchema(
                  Schema.of(
                      Field.of("topics", FieldType.array(FieldType.STRING)),
                      Field.of("consumer_config", FieldType.map(FieldType.STRING, FieldType.STRING)),
                      Field.of("key_deserializer", FieldType.STRING),
                      Field.of("value_deserializer", FieldType.STRING),
                      Field.of("start_read_time", FieldType.INT64),
                      Field.of("commit_offset_in_finalize", FieldType.BOOLEAN),
                      Field.of("timestamp_policy", FieldType.STRING)))
              .withFieldValue("topics", topics)
              .withFieldValue("consumer_config", consumerConfig)
              .withFieldValue("key_deserializer", keyDeserializer)
              .withFieldValue("value_deserializer", valueDeserializer)
              .withFieldValue("start_read_time", startReadTime)
              .withFieldValue("commit_offset_in_finalize", false)
              .withFieldValue("timestamp_policy", "ProcessingTime")
              .build());
  RunnerApi.Components defaultInstance = RunnerApi.Components.getDefaultInstance();
  ExpansionApi.ExpansionRequest request =
      ExpansionApi.ExpansionRequest.newBuilder()
          .setComponents(defaultInstance)
          .setTransform(
              RunnerApi.PTransform.newBuilder()
                  .setUniqueName("test")
                  .setSpec(
                      RunnerApi.FunctionSpec.newBuilder()
                          .setUrn(org.apache.beam.sdk.io.kafka.KafkaIO.Read.External.URN_WITH_METADATA)
                          .setPayload(payload.toByteString())))
          .setNamespace("test_namespace")
          .build();
  ExpansionService expansionService = new ExpansionService();
  TestStreamObserver<ExpansionApi.ExpansionResponse> observer = new TestStreamObserver<>();
  expansionService.expand(request, observer);
  ExpansionApi.ExpansionResponse result = observer.result;
  RunnerApi.PTransform transform = result.getTransform();
  assertThat(
      transform.getSubtransformsList(),
      Matchers.hasItem(MatchesPattern.matchesPattern(".*KafkaIO-Read.*")));
  assertThat(
      transform.getSubtransformsList(),
      Matchers.hasItem(MatchesPattern.matchesPattern(".*Convert-to-ExternalKafkaRecord.*")));
  assertThat(
      transform.getSubtransformsList(),
      Matchers.hasItem(MatchesPattern.matchesPattern(".*Convert-ConvertTransform.*")));
  assertThat(transform.getInputsCount(), Matchers.is(0));
  assertThat(transform.getOutputsCount(), Matchers.is(1));
  RunnerApi.PTransform kafkaReadComposite =
      result.getComponents().getTransformsOrThrow(transform.getSubtransforms(0));
  verifyKafkaReadComposite(
      result.getComponents().getTransformsOrThrow(kafkaReadComposite.getSubtransforms(0)), result);
}
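Both Kafka read tests delegate to a verifyKafkaReadComposite helper that is not included in this excerpt. A plausible minimal sketch, assuming it only checks that the SDF-based Kafka read expands into the expected subtransforms; the subtransform name patterns below are assumptions, and the real Beam helper may assert more:

private void verifyKafkaReadComposite(
    RunnerApi.PTransform kafkaComposite, ExpansionApi.ExpansionResponse result) {
  // Assumed subtransform names for the SDF-based read; illustrative only.
  assertThat(
      kafkaComposite.getSubtransformsList(),
      Matchers.hasItem(MatchesPattern.matchesPattern(".*Impulse.*")));
  assertThat(
      kafkaComposite.getSubtransformsList(),
      Matchers.hasItem(MatchesPattern.matchesPattern(".*GenerateKafkaSourceDescriptor.*")));
}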