
Example 1 with ExpansionService

Use of org.apache.beam.sdk.expansion.service.ExpansionService in the apache/beam project.

From the class KafkaIOExternalTest, method testConstructKafkaReadWithoutMetadata.

@Test
public void testConstructKafkaReadWithoutMetadata() throws Exception {
    List<String> topics = ImmutableList.of("topic1", "topic2");
    String keyDeserializer = "org.apache.kafka.common.serialization.ByteArrayDeserializer";
    String valueDeserializer = "org.apache.kafka.common.serialization.LongDeserializer";
    ImmutableMap<String, String> consumerConfig =
        ImmutableMap.<String, String>builder()
            .put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "server1:port,server2:port")
            .put("key2", "value2")
            .put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializer)
            .put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializer)
            .build();
    Long startReadTime = 100L;
    ExternalTransforms.ExternalConfigurationPayload payload = encodeRow(Row.withSchema(Schema.of(Field.of("topics", FieldType.array(FieldType.STRING)), Field.of("consumer_config", FieldType.map(FieldType.STRING, FieldType.STRING)), Field.of("key_deserializer", FieldType.STRING), Field.of("value_deserializer", FieldType.STRING), Field.of("start_read_time", FieldType.INT64), Field.of("commit_offset_in_finalize", FieldType.BOOLEAN), Field.of("timestamp_policy", FieldType.STRING))).withFieldValue("topics", topics).withFieldValue("consumer_config", consumerConfig).withFieldValue("key_deserializer", keyDeserializer).withFieldValue("value_deserializer", valueDeserializer).withFieldValue("start_read_time", startReadTime).withFieldValue("commit_offset_in_finalize", false).withFieldValue("timestamp_policy", "ProcessingTime").build());
    RunnerApi.Components defaultInstance = RunnerApi.Components.getDefaultInstance();
    ExpansionApi.ExpansionRequest request =
        ExpansionApi.ExpansionRequest.newBuilder()
            .setComponents(defaultInstance)
            .setTransform(
                RunnerApi.PTransform.newBuilder()
                    .setUniqueName("test")
                    .setSpec(
                        RunnerApi.FunctionSpec.newBuilder()
                            .setUrn(org.apache.beam.sdk.io.kafka.KafkaIO.Read.External.URN_WITHOUT_METADATA)
                            .setPayload(payload.toByteString())))
            .setNamespace("test_namespace")
            .build();
    ExpansionService expansionService = new ExpansionService();
    TestStreamObserver<ExpansionApi.ExpansionResponse> observer = new TestStreamObserver<>();
    expansionService.expand(request, observer);
    ExpansionApi.ExpansionResponse result = observer.result;
    RunnerApi.PTransform transform = result.getTransform();
    assertThat(transform.getSubtransformsList(), Matchers.hasItem(MatchesPattern.matchesPattern(".*KafkaIO-Read.*")));
    assertThat(transform.getSubtransformsList(), Matchers.hasItem(MatchesPattern.matchesPattern(".*Remove-Kafka-Metadata.*")));
    assertThat(transform.getInputsCount(), Matchers.is(0));
    assertThat(transform.getOutputsCount(), Matchers.is(1));
    RunnerApi.PTransform kafkaReadComposite = result.getComponents().getTransformsOrThrow(transform.getSubtransforms(0));
    result.getComponents().getTransformsOrThrow(kafkaReadComposite.getSubtransforms(0));
    verifyKafkaReadComposite(result.getComponents().getTransformsOrThrow(kafkaReadComposite.getSubtransforms(0)), result);
}
Also used: ExpansionService (org.apache.beam.sdk.expansion.service.ExpansionService), ExternalTransforms (org.apache.beam.model.pipeline.v1.ExternalTransforms), ByteString (org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.ByteString), RunnerApi (org.apache.beam.model.pipeline.v1.RunnerApi), ExpansionApi (org.apache.beam.model.expansion.v1.ExpansionApi), ExternalConfigurationPayload (org.apache.beam.model.pipeline.v1.ExternalTransforms.ExternalConfigurationPayload), Test (org.junit.Test)
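This test (and the other test examples on this page) hands a TestStreamObserver to expansionService.expand and then reads the captured response from observer.result. That helper is not reproduced here; the following is a minimal sketch of what it plausibly looks like, assuming it simply records the single response delivered via onNext and fails fast on errors (the vendored gRPC StreamObserver package is inferred from the vendored ByteString import above).

// Hypothetical sketch of the TestStreamObserver helper used by these tests:
// it records the single ExpansionResponse so the test can inspect it afterwards.
private static class TestStreamObserver<T>
    implements org.apache.beam.vendor.grpc.v1p43p2.io.grpc.stub.StreamObserver<T> {

  T result;

  @Override
  public void onNext(T value) {
    result = value;
  }

  @Override
  public void onError(Throwable t) {
    // Surface any expansion failure directly to the test.
    throw new RuntimeException(t);
  }

  @Override
  public void onCompleted() {}
}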

Example 2 with ExpansionService

Use of org.apache.beam.sdk.expansion.service.ExpansionService in the apache/beam project.

From the class PubsubIOExternalTest, method testConstructPubsubRead.

@Test
public void testConstructPubsubRead() throws Exception {
    String topic = "projects/project-1234/topics/topic_name";
    String idAttribute = "id_foo";
    Boolean needsAttributes = true;
    ExternalTransforms.ExternalConfigurationPayload payload = encodeRow(Row.withSchema(Schema.of(Field.of("topic", FieldType.STRING), Field.of("id_label", FieldType.STRING), Field.of("with_attributes", FieldType.BOOLEAN))).withFieldValue("topic", topic).withFieldValue("id_label", idAttribute).withFieldValue("with_attributes", needsAttributes).build());
    RunnerApi.Components defaultInstance = RunnerApi.Components.getDefaultInstance();
    ExpansionApi.ExpansionRequest request =
        ExpansionApi.ExpansionRequest.newBuilder()
            .setComponents(defaultInstance)
            .setTransform(
                RunnerApi.PTransform.newBuilder()
                    .setUniqueName("test")
                    .setSpec(
                        RunnerApi.FunctionSpec.newBuilder()
                            .setUrn(ExternalRead.URN)
                            .setPayload(payload.toByteString())))
            .setNamespace("test_namespace")
            .build();
    ExpansionService expansionService = new ExpansionService();
    TestStreamObserver<ExpansionApi.ExpansionResponse> observer = new TestStreamObserver<>();
    expansionService.expand(request, observer);
    ExpansionApi.ExpansionResponse result = observer.result;
    RunnerApi.PTransform transform = result.getTransform();
    assertThat(transform.getSubtransformsList(), Matchers.hasItem(MatchesPattern.matchesPattern(".*PubsubUnboundedSource.*")));
    assertThat(transform.getSubtransformsList(), Matchers.hasItem(MatchesPattern.matchesPattern(".*MapElements.*")));
    assertThat(transform.getInputsCount(), Matchers.is(0));
    assertThat(transform.getOutputsCount(), Matchers.is(1));
}
Also used: ExpansionService (org.apache.beam.sdk.expansion.service.ExpansionService), ExternalTransforms (org.apache.beam.model.pipeline.v1.ExternalTransforms), ByteString (org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.ByteString), RunnerApi (org.apache.beam.model.pipeline.v1.RunnerApi), ExpansionApi (org.apache.beam.model.expansion.v1.ExpansionApi), Test (org.junit.Test)
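Both the Kafka and Pubsub tests rely on an encodeRow helper that is not shown on this page. A plausible sketch of that helper follows, assuming it serializes the configuration Row with RowCoder and pairs the bytes with the translated schema proto; RowCoder (org.apache.beam.sdk.coders.RowCoder) and SchemaTranslation (org.apache.beam.sdk.schemas.SchemaTranslation) are the assumed building blocks here, not confirmed by the listing above.

// Hypothetical sketch of the encodeRow helper: encode the configuration Row and
// attach its schema so the expansion service can decode it on the other side.
private static ExternalTransforms.ExternalConfigurationPayload encodeRow(Row row) {
  ByteString.Output out = ByteString.newOutput();
  try {
    RowCoder.of(row.getSchema()).encode(row, out);
  } catch (IOException e) {
    throw new RuntimeException("Failed to encode configuration row", e);
  }
  return ExternalTransforms.ExternalConfigurationPayload.newBuilder()
      .setSchema(SchemaTranslation.schemaToProto(row.getSchema(), true))
      .setPayload(out.toByteString())
      .build();
}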

Example 3 with ExpansionService

Use of org.apache.beam.sdk.expansion.service.ExpansionService in the apache/beam project.

From the class JobServerDriver, method createExpansionService.

@Nullable
private ExpansionServer createExpansionService() throws IOException {
    // Skip creating expansion server if configured port is negative.
    if (configuration.expansionPort < 0) {
        return null;
    }
    ExpansionServer expansionServer = ExpansionServer.create(new ExpansionService(), configuration.host, configuration.expansionPort);
    LOG.info("Java ExpansionService started on {}:{}", expansionServer.getHost(), expansionServer.getPort());
    return expansionServer;
}
Also used: ExpansionService (org.apache.beam.sdk.expansion.service.ExpansionService), ExpansionServer (org.apache.beam.sdk.expansion.service.ExpansionServer), Nullable (javax.annotation.Nullable)
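The same two classes can also serve an expansion endpoint outside of JobServerDriver. Below is a minimal standalone usage sketch, not taken from the Beam sources: it assumes ExpansionServer implements AutoCloseable, and the host "localhost" and port 8097 are example values only.

public static void main(String[] args) throws Exception {
  // Standalone usage sketch (hypothetical): serve a Java ExpansionService on a fixed port.
  try (ExpansionServer server =
      ExpansionServer.create(new ExpansionService(), "localhost", 8097)) {
    System.out.println(
        "Expansion service listening on " + server.getHost() + ":" + server.getPort());
    // Keep the process alive until it is interrupted.
    Thread.currentThread().join();
  }
}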

Example 4 with ExpansionService

Use of org.apache.beam.sdk.expansion.service.ExpansionService in the apache/beam project.

From the class KafkaIOExternalTest, method testConstructKafkaWrite.

@Test
public void testConstructKafkaWrite() throws Exception {
    String topic = "topic";
    String keySerializer = "org.apache.kafka.common.serialization.ByteArraySerializer";
    String valueSerializer = "org.apache.kafka.common.serialization.LongSerializer";
    ImmutableMap<String, String> producerConfig =
        ImmutableMap.<String, String>builder()
            .put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "server1:port,server2:port")
            .put("retries", "3")
            .build();
    ExternalTransforms.ExternalConfigurationPayload payload = encodeRow(Row.withSchema(Schema.of(Field.of("topic", FieldType.STRING), Field.of("producer_config", FieldType.map(FieldType.STRING, FieldType.STRING)), Field.of("key_serializer", FieldType.STRING), Field.of("value_serializer", FieldType.STRING))).withFieldValue("topic", topic).withFieldValue("producer_config", producerConfig).withFieldValue("key_serializer", keySerializer).withFieldValue("value_serializer", valueSerializer).build());
    Pipeline p = Pipeline.create();
    p.apply(Impulse.create()).apply(WithKeys.of("key"));
    RunnerApi.Pipeline pipelineProto = PipelineTranslation.toProto(p);
    String inputPCollection =
        Iterables.getOnlyElement(
            Iterables.getLast(pipelineProto.getComponents().getTransformsMap().values())
                .getOutputsMap()
                .values());
    ExpansionApi.ExpansionRequest request =
        ExpansionApi.ExpansionRequest.newBuilder()
            .setComponents(pipelineProto.getComponents())
            .setTransform(
                RunnerApi.PTransform.newBuilder()
                    .setUniqueName("test")
                    .putInputs("input", inputPCollection)
                    .setSpec(
                        RunnerApi.FunctionSpec.newBuilder()
                            .setUrn(org.apache.beam.sdk.io.kafka.KafkaIO.Write.External.URN)
                            .setPayload(payload.toByteString())))
            .setNamespace("test_namespace")
            .build();
    ExpansionService expansionService = new ExpansionService();
    TestStreamObserver<ExpansionApi.ExpansionResponse> observer = new TestStreamObserver<>();
    expansionService.expand(request, observer);
    ExpansionApi.ExpansionResponse result = observer.result;
    RunnerApi.PTransform transform = result.getTransform();
    assertThat(transform.getSubtransformsList(), Matchers.hasItem(MatchesPattern.matchesPattern(".*Kafka-ProducerRecord.*")));
    assertThat(transform.getSubtransformsList(), Matchers.hasItem(MatchesPattern.matchesPattern(".*KafkaIO-WriteRecords.*")));
    assertThat(transform.getInputsCount(), Matchers.is(1));
    assertThat(transform.getOutputsCount(), Matchers.is(0));
    RunnerApi.PTransform writeComposite = result.getComponents().getTransformsOrThrow(transform.getSubtransforms(1));
    RunnerApi.PTransform writeParDo =
        result
            .getComponents()
            .getTransformsOrThrow(
                result
                    .getComponents()
                    .getTransformsOrThrow(writeComposite.getSubtransforms(0))
                    .getSubtransforms(0));
    RunnerApi.ParDoPayload parDoPayload = RunnerApi.ParDoPayload.parseFrom(writeParDo.getSpec().getPayload());
    DoFn kafkaWriter = ParDoTranslation.getDoFn(parDoPayload);
    assertThat(kafkaWriter, Matchers.instanceOf(KafkaWriter.class));
    KafkaIO.WriteRecords spec = (KafkaIO.WriteRecords) Whitebox.getInternalState(kafkaWriter, "spec");
    assertThat(spec.getProducerConfig(), Matchers.is(producerConfig));
    assertThat(spec.getTopic(), Matchers.is(topic));
    assertThat(spec.getKeySerializer().getName(), Matchers.is(keySerializer));
    assertThat(spec.getValueSerializer().getName(), Matchers.is(valueSerializer));
}
Also used: ExpansionService (org.apache.beam.sdk.expansion.service.ExpansionService), ExternalTransforms (org.apache.beam.model.pipeline.v1.ExternalTransforms), ByteString (org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.ByteString), Pipeline (org.apache.beam.sdk.Pipeline), RunnerApi (org.apache.beam.model.pipeline.v1.RunnerApi), DoFn (org.apache.beam.sdk.transforms.DoFn), ExpansionApi (org.apache.beam.model.expansion.v1.ExpansionApi), ExternalConfigurationPayload (org.apache.beam.model.pipeline.v1.ExternalTransforms.ExternalConfigurationPayload), Test (org.junit.Test)
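For comparison, the configuration carried in the payload above corresponds roughly to building the same write directly against the KafkaIO builder API. The sketch below is illustrative only, not the construction path the expansion service actually takes; the serializer classes and config values are copied from the test.

// Rough Java-side equivalent of the external write configuration in this test (a sketch).
KafkaIO.Write<byte[], Long> write =
    KafkaIO.<byte[], Long>write()
        .withBootstrapServers("server1:port,server2:port")
        .withTopic("topic")
        .withKeySerializer(ByteArraySerializer.class)
        .withValueSerializer(LongSerializer.class)
        .withProducerConfigUpdates(ImmutableMap.of("retries", "3"));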

Example 5 with ExpansionService

Use of org.apache.beam.sdk.expansion.service.ExpansionService in the apache/beam project.

From the class KafkaIOExternalTest, method testConstructKafkaRead.

@Test
public void testConstructKafkaRead() throws Exception {
    List<String> topics = ImmutableList.of("topic1", "topic2");
    String keyDeserializer = "org.apache.kafka.common.serialization.ByteArrayDeserializer";
    String valueDeserializer = "org.apache.kafka.common.serialization.ByteArrayDeserializer";
    ImmutableMap<String, String> consumerConfig =
        ImmutableMap.<String, String>builder()
            .put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "server1:port,server2:port")
            .put("key2", "value2")
            .put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializer)
            .put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializer)
            .build();
    Long startReadTime = 100L;
    ExternalTransforms.ExternalConfigurationPayload payload = encodeRow(Row.withSchema(Schema.of(Field.of("topics", FieldType.array(FieldType.STRING)), Field.of("consumer_config", FieldType.map(FieldType.STRING, FieldType.STRING)), Field.of("key_deserializer", FieldType.STRING), Field.of("value_deserializer", FieldType.STRING), Field.of("start_read_time", FieldType.INT64), Field.of("commit_offset_in_finalize", FieldType.BOOLEAN), Field.of("timestamp_policy", FieldType.STRING))).withFieldValue("topics", topics).withFieldValue("consumer_config", consumerConfig).withFieldValue("key_deserializer", keyDeserializer).withFieldValue("value_deserializer", valueDeserializer).withFieldValue("start_read_time", startReadTime).withFieldValue("commit_offset_in_finalize", false).withFieldValue("timestamp_policy", "ProcessingTime").build());
    RunnerApi.Components defaultInstance = RunnerApi.Components.getDefaultInstance();
    ExpansionApi.ExpansionRequest request =
        ExpansionApi.ExpansionRequest.newBuilder()
            .setComponents(defaultInstance)
            .setTransform(
                RunnerApi.PTransform.newBuilder()
                    .setUniqueName("test")
                    .setSpec(
                        RunnerApi.FunctionSpec.newBuilder()
                            .setUrn(org.apache.beam.sdk.io.kafka.KafkaIO.Read.External.URN_WITH_METADATA)
                            .setPayload(payload.toByteString())))
            .setNamespace("test_namespace")
            .build();
    ExpansionService expansionService = new ExpansionService();
    TestStreamObserver<ExpansionApi.ExpansionResponse> observer = new TestStreamObserver<>();
    expansionService.expand(request, observer);
    ExpansionApi.ExpansionResponse result = observer.result;
    RunnerApi.PTransform transform = result.getTransform();
    assertThat(transform.getSubtransformsList(), Matchers.hasItem(MatchesPattern.matchesPattern(".*KafkaIO-Read.*")));
    assertThat(transform.getSubtransformsList(), Matchers.hasItem(MatchesPattern.matchesPattern(".*Convert-to-ExternalKafkaRecord.*")));
    assertThat(transform.getSubtransformsList(), Matchers.hasItem(MatchesPattern.matchesPattern(".*Convert-ConvertTransform.*")));
    assertThat(transform.getInputsCount(), Matchers.is(0));
    assertThat(transform.getOutputsCount(), Matchers.is(1));
    RunnerApi.PTransform kafkaReadComposite = result.getComponents().getTransformsOrThrow(transform.getSubtransforms(0));
    verifyKafkaReadComposite(result.getComponents().getTransformsOrThrow(kafkaReadComposite.getSubtransforms(0)), result);
}
Also used: ExpansionService (org.apache.beam.sdk.expansion.service.ExpansionService), ExternalTransforms (org.apache.beam.model.pipeline.v1.ExternalTransforms), ByteString (org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.ByteString), RunnerApi (org.apache.beam.model.pipeline.v1.RunnerApi), ExpansionApi (org.apache.beam.model.expansion.v1.ExpansionApi), ExternalConfigurationPayload (org.apache.beam.model.pipeline.v1.ExternalTransforms.ExternalConfigurationPayload), Test (org.junit.Test)
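Unlike Example 1, this test targets URN_WITH_METADATA and therefore expects the Convert-to-ExternalKafkaRecord subtransform instead of Remove-Kafka-Metadata. For orientation, the payload maps roughly to the following direct KafkaIO.Read configuration; this is an illustrative sketch (the translation of start_read_time to a Joda Instant and the use of withConsumerConfigUpdates are assumptions, not shown in the test above).

// Rough Java-side equivalent of the external read configuration above (illustrative sketch).
KafkaIO.Read<byte[], byte[]> read =
    KafkaIO.<byte[], byte[]>read()
        .withBootstrapServers("server1:port,server2:port")
        .withTopics(ImmutableList.of("topic1", "topic2"))
        .withKeyDeserializer(ByteArrayDeserializer.class)
        .withValueDeserializer(ByteArrayDeserializer.class)
        .withStartReadTime(new org.joda.time.Instant(100L))
        .withConsumerConfigUpdates(ImmutableMap.of("key2", "value2"));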

Aggregations

ExpansionService (org.apache.beam.sdk.expansion.service.ExpansionService): 6 uses
ExpansionApi (org.apache.beam.model.expansion.v1.ExpansionApi): 5 uses
ExternalTransforms (org.apache.beam.model.pipeline.v1.ExternalTransforms): 5 uses
RunnerApi (org.apache.beam.model.pipeline.v1.RunnerApi): 5 uses
ByteString (org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.ByteString): 5 uses
Test (org.junit.Test): 5 uses
ExternalConfigurationPayload (org.apache.beam.model.pipeline.v1.ExternalTransforms.ExternalConfigurationPayload): 3 uses
Pipeline (org.apache.beam.sdk.Pipeline): 2 uses
Nullable (javax.annotation.Nullable): 1 use
ExpansionServer (org.apache.beam.sdk.expansion.service.ExpansionServer): 1 use
ValueProvider (org.apache.beam.sdk.options.ValueProvider): 1 use
DoFn (org.apache.beam.sdk.transforms.DoFn): 1 use