
Example 11 with SerializableFunction

Use of org.apache.beam.sdk.transforms.SerializableFunction in project beam by apache.

From the class SparkCoGroupByKeyStreamingTest, the method testInStreamingMode:

@Category(StreamingTest.class)
@Test
public void testInStreamingMode() throws Exception {
    Instant instant = new Instant(0);
    CreateStream<KV<Integer, Integer>> source1 =
        CreateStream.of(KvCoder.of(VarIntCoder.of(), VarIntCoder.of()), batchDuration())
            .emptyBatch()
            .advanceWatermarkForNextBatch(instant)
            .nextBatch(
                TimestampedValue.of(KV.of(1, 1), instant),
                TimestampedValue.of(KV.of(1, 2), instant),
                TimestampedValue.of(KV.of(1, 3), instant))
            .advanceWatermarkForNextBatch(instant.plus(Duration.standardSeconds(1L)))
            .nextBatch(
                TimestampedValue.of(KV.of(2, 4), instant.plus(Duration.standardSeconds(1L))),
                TimestampedValue.of(KV.of(2, 5), instant.plus(Duration.standardSeconds(1L))),
                TimestampedValue.of(KV.of(2, 6), instant.plus(Duration.standardSeconds(1L))))
            .advanceNextBatchWatermarkToInfinity();
    CreateStream<KV<Integer, Integer>> source2 =
        CreateStream.of(KvCoder.of(VarIntCoder.of(), VarIntCoder.of()), batchDuration())
            .emptyBatch()
            .advanceWatermarkForNextBatch(instant)
            .nextBatch(
                TimestampedValue.of(KV.of(1, 11), instant),
                TimestampedValue.of(KV.of(1, 12), instant),
                TimestampedValue.of(KV.of(1, 13), instant))
            .advanceWatermarkForNextBatch(instant.plus(Duration.standardSeconds(1L)))
            .nextBatch(
                TimestampedValue.of(KV.of(2, 14), instant.plus(Duration.standardSeconds(1L))),
                TimestampedValue.of(KV.of(2, 15), instant.plus(Duration.standardSeconds(1L))),
                TimestampedValue.of(KV.of(2, 16), instant.plus(Duration.standardSeconds(1L))))
            .advanceNextBatchWatermarkToInfinity();
    PCollection<KV<Integer, Integer>> input1 =
        pipeline
            .apply("create source1", source1)
            .apply(
                "window input1",
                Window.<KV<Integer, Integer>>into(FixedWindows.of(Duration.standardSeconds(3L)))
                    .withAllowedLateness(Duration.ZERO));
    PCollection<KV<Integer, Integer>> input2 =
        pipeline
            .apply("create source2", source2)
            .apply(
                "window input2",
                Window.<KV<Integer, Integer>>into(FixedWindows.of(Duration.standardSeconds(3L)))
                    .withAllowedLateness(Duration.ZERO));
    PCollection<KV<Integer, CoGbkResult>> output =
        KeyedPCollectionTuple.of(INPUT1_TAG, input1)
            .and(INPUT2_TAG, input2)
            .apply(CoGroupByKey.create());
    PAssert.that("Wrong output of the join using CoGroupByKey in streaming mode", output)
        .satisfies((SerializableFunction<Iterable<KV<Integer, CoGbkResult>>, Void>) input -> {
        assertEquals("Wrong size of the output PCollection", 2, Iterables.size(input));
        for (KV<Integer, CoGbkResult> element : input) {
            if (element.getKey() == 1) {
                Iterable<Integer> input1Elements = element.getValue().getAll(INPUT1_TAG);
                assertEquals("Wrong number of values for output elements for tag input1 and key 1", 3, Iterables.size(input1Elements));
                assertThat("Elements of PCollection input1 for key \"1\" are not present in the output PCollection", input1Elements, containsInAnyOrder(1, 2, 3));
                Iterable<Integer> input2Elements = element.getValue().getAll(INPUT2_TAG);
                assertEquals("Wrong number of values for output elements for tag input2 and key 1", 3, Iterables.size(input2Elements));
                assertThat("Elements of PCollection input2 for key \"1\" are not present in the output PCollection", input2Elements, containsInAnyOrder(11, 12, 13));
            } else if (element.getKey() == 2) {
                Iterable<Integer> input1Elements = element.getValue().getAll(INPUT1_TAG);
                assertEquals("Wrong number of values for output elements for tag input1 and key 2", 3, Iterables.size(input1Elements));
                assertThat("Elements of PCollection input1 for key \"2\" are not present in the output PCollection", input1Elements, containsInAnyOrder(4, 5, 6));
                Iterable<Integer> input2Elements = element.getValue().getAll(INPUT2_TAG);
                assertEquals("Wrong number of values for output elements for tag input2 and key 2", 3, Iterables.size(input2Elements));
                assertThat("Elements of PCollection input2 for key \"2\" are not present in the output PCollection", input2Elements, containsInAnyOrder(14, 15, 16));
            } else {
                fail("Unknown key in the output PCollection");
            }
        }
        return null;
    });
    pipeline.run();
}
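The detail to note above is the explicit cast: PAssert's satisfies expects a SerializableFunction<Iterable<T>, Void> so the assertion can be serialized and shipped to the workers, and the cast spells out that target type for the lambda. Stripped of the per-key checks, the pattern is just this minimal sketch, reusing the output collection from above:

PAssert.that(output)
    .satisfies(
        (SerializableFunction<Iterable<KV<Integer, CoGbkResult>>, Void>)
            elements -> {
                // The body runs on the workers, so anything it captures must be serializable.
                assertEquals("Wrong size of the output PCollection", 2, Iterables.size(elements));
                // A SerializableFunction must return a value; assertion functions return Void/null.
                return null;
            });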

Example 12 with SerializableFunction

Use of org.apache.beam.sdk.transforms.SerializableFunction in project beam by apache.

From the class BigQueryIOWriteTest, the method testWriteAvroWithCustomWriter:

@Test
public void testWriteAvroWithCustomWriter() throws Exception {
    if (useStorageApi || useStreaming) {
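        // Custom Avro writers apply only to batch file loads, so the other write modes are skipped.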
        return;
    }
    SerializableFunction<AvroWriteRequest<InputRecord>, GenericRecord> formatFunction = r -> {
        GenericRecord rec = new GenericData.Record(r.getSchema());
        InputRecord i = r.getElement();
        rec.put("strVal", i.strVal());
        rec.put("longVal", i.longVal());
        rec.put("doubleVal", i.doubleVal());
        rec.put("instantVal", i.instantVal().getMillis() * 1000);
        return rec;
    };
    SerializableFunction<org.apache.avro.Schema, DatumWriter<GenericRecord>> customWriterFactory =
        s -> new GenericDatumWriter<GenericRecord>() {

        @Override
        protected void writeString(org.apache.avro.Schema schema, Object datum, Encoder out) throws IOException {
            super.writeString(schema, datum.toString() + "_custom", out);
        }
    };
    p.apply(
            Create.of(
                    InputRecord.create("test", 1, 1.0, Instant.parse("2019-01-01T00:00:00Z")),
                    InputRecord.create("test2", 2, 2.0, Instant.parse("2019-02-01T00:00:00Z")))
                .withCoder(INPUT_RECORD_CODER))
        .apply(
            BigQueryIO.<InputRecord>write()
                .to("dataset-id.table-id")
                .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
                .withSchema(
                    new TableSchema()
                        .setFields(
                            ImmutableList.of(
                                new TableFieldSchema().setName("strVal").setType("STRING"),
                                new TableFieldSchema().setName("longVal").setType("INTEGER"),
                                new TableFieldSchema().setName("doubleVal").setType("FLOAT"),
                                new TableFieldSchema().setName("instantVal").setType("TIMESTAMP"))))
                .withTestServices(fakeBqServices)
                .withAvroWriter(formatFunction, customWriterFactory)
                .withoutValidation());
    p.run();
    assertThat(
        fakeDatasetService.getAllRows("project-id", "dataset-id", "table-id"),
        containsInAnyOrder(
            new TableRow()
                .set("strVal", "test_custom")
                .set("longVal", "1")
                .set("doubleVal", 1.0D)
                .set("instantVal", "2019-01-01 00:00:00 UTC"),
            new TableRow()
                .set("strVal", "test2_custom")
                .set("longVal", "2")
                .set("doubleVal", 2.0D)
                .set("instantVal", "2019-02-01 00:00:00 UTC")));
}
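The two arguments to withAvroWriter divide the work cleanly: formatFunction turns each InputRecord into a GenericRecord matching the Avro schema carried by the AvroWriteRequest, while customWriterFactory swaps in a DatumWriter whose writeString appends "_custom" to every string value, which is exactly why the assertions expect test_custom and test2_custom.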

Example 13 with SerializableFunction

Use of org.apache.beam.sdk.transforms.SerializableFunction in project beam by apache.

From the class ConvertHelpers, the method getConvertPrimitive:

/**
 * Returns a function to convert a Row into a primitive type. This only works when the row schema
 * contains a single field, and that field is convertible to the primitive type.
 */
@SuppressWarnings("unchecked")
public static <OutputT> SerializableFunction<?, OutputT> getConvertPrimitive(
    FieldType fieldType,
    TypeDescriptor<?> outputTypeDescriptor,
    TypeConversionsFactory typeConversionsFactory) {
    FieldType expectedFieldType = StaticSchemaInference.fieldFromType(outputTypeDescriptor, JavaFieldTypeSupplier.INSTANCE);
    if (!expectedFieldType.equals(fieldType)) {
        throw new IllegalArgumentException("Element argument type " + outputTypeDescriptor + " does not work with expected schema field type " + fieldType);
    }
    Type expectedInputType = typeConversionsFactory.createTypeConversion(false).convert(outputTypeDescriptor);
    TypeDescriptor<?> outputType = outputTypeDescriptor;
    if (outputType.getRawType().isPrimitive()) {
        // A SerializableFunction can only return an Object type, so if the DoFn parameter is a
        // primitive type, then box it for the return. The return type will be unboxed before being
        // forwarded to the DoFn parameter.
        outputType = TypeDescriptor.of(Primitives.wrap(outputType.getRawType()));
    }
    TypeDescription.Generic genericType =
        TypeDescription.Generic.Builder.parameterizedType(
                SerializableFunction.class, expectedInputType, outputType.getType())
            .build();
    DynamicType.Builder<SerializableFunction> builder =
        (DynamicType.Builder<SerializableFunction>) new ByteBuddy().subclass(genericType);
    try {
        return builder
            .visit(
                new AsmVisitorWrapper.ForDeclaredMethods().writerFlags(ClassWriter.COMPUTE_FRAMES))
            .method(ElementMatchers.named("apply"))
            .intercept(new ConvertPrimitiveInstruction(outputType, typeConversionsFactory))
            .make()
            .load(ReflectHelpers.findClassLoader(), ClassLoadingStrategy.Default.INJECTION)
            .getLoaded()
            .getDeclaredConstructor()
            .newInstance();
    } catch (InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e) {
        throw new RuntimeException(e);
    }
}
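For orientation, a hypothetical call site is sketched below; the INT64-to-Long pairing, the variable names, and the factory instance are assumptions for illustration, not code from the Beam source:

// Hypothetical usage sketch: build a function that reads a single-field row's INT64 value as a Long.
SerializableFunction<?, Long> toLong =
    ConvertHelpers.getConvertPrimitive(
        FieldType.INT64,                  // schema type of the single field (assumed)
        TypeDescriptor.of(Long.class),    // desired output type, boxed as the comment above explains
        typeConversionsFactory);          // a TypeConversionsFactory supplied by the caller (assumed)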

Example 14 with SerializableFunction

Use of org.apache.beam.sdk.transforms.SerializableFunction in project beam by apache.

From the class BigQueryHllSketchCompatibilityIT, the method writeSketchToBigQuery:

private void writeSketchToBigQuery(List<String> testData, String expectedChecksum) {
    String tableSpec = String.format("%s.%s", DATASET_ID, SKETCH_TABLE_ID);
    String query = String.format("SELECT HLL_COUNT.EXTRACT(%s) FROM %s", SKETCH_FIELD_NAME, tableSpec);
    TableSchema tableSchema =
        new TableSchema()
            .setFields(
                Collections.singletonList(
                    new TableFieldSchema().setName(SKETCH_FIELD_NAME).setType(SKETCH_FIELD_TYPE)));
    TestPipelineOptions options = TestPipeline.testingPipelineOptions().as(TestPipelineOptions.class);
    Pipeline p = Pipeline.create(options);
    // Suppress nullness checking until we have a stub class for BigQuery TableRow.
    @SuppressWarnings("nullness")
    SerializableFunction<byte[], TableRow> formatFn =
        sketch -> new TableRow().set(SKETCH_FIELD_NAME, sketch.length == 0 ? null : sketch);
    p.apply(Create.of(testData).withType(TypeDescriptor.of(String.class)))
        .apply(HllCount.Init.forStrings().globally())
        .apply(
            BigQueryIO.<byte[]>write()
                .to(tableSpec)
                .withSchema(tableSchema)
                .withFormatFunction(formatFn)
                .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_TRUNCATE));
    p.run().waitUntilFinish();
    // BigqueryMatcher will send a query to retrieve the estimated count and verify its
    // correctness using a checksum.
    assertThat(
        createQueryUsingStandardSql(APP_NAME, PROJECT_ID, query),
        queryResultHasChecksum(expectedChecksum));
}
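HllCount.Init.forStrings().globally() yields a PCollection<byte[]> containing a single serialized sketch, which is why the write is parameterized as BigQueryIO.<byte[]>write(); the format function stores a zero-length sketch (presumably the representation of a sketch over no input) as NULL rather than as empty bytes.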

Example 15 with SerializableFunction

Use of org.apache.beam.sdk.transforms.SerializableFunction in project beam by apache.

From the class Neo4jIOIT, the method testLargeWriteUnwind:

@Test
public void testLargeWriteUnwind() throws Exception {
    final int startId = 5000;
    final int endId = 6000;
    // Create 1000 IDs
    List<Integer> idList = new ArrayList<>();
    for (int id = startId; id < endId; id++) {
        idList.add(id);
    }
    PCollection<Integer> idCollection = largeWriteUnwindPipeline.apply(Create.of(idList));
    // Every row is represented by a Map<String, Object> in the parameters map.
    // We accumulate the rows and 'unwind' those to Neo4j for performance reasons.
    SerializableFunction<Integer, Map<String, Object>> parametersFunction =
        id -> ImmutableMap.of("id", id, "name", "Casters", "firstName", "Matt");
    // 1000 rows with a batch size of 123 should trigger most scenarios we can think of.
    // We've put a unique constraint on Something.id.
    Neo4jIO.WriteUnwind<Integer> write =
        Neo4jIO.<Integer>writeUnwind()
            .withDriverConfiguration(
                Neo4jTestUtil.getDriverConfiguration(containerHostname, containerPort))
            .withSessionConfig(SessionConfig.forDatabase(Neo4jTestUtil.NEO4J_DATABASE))
            .withBatchSize(123)
            .withUnwindMapName("rows")
            .withCypher("UNWIND $rows AS row CREATE(n:Something { id : row.id })")
            .withParametersFunction(parametersFunction)
            .withCypherLogging();
    idCollection.apply(write);
    // Now run this pipeline.
    PipelineResult pipelineResult = largeWriteUnwindPipeline.run();
    Assert.assertEquals(PipelineResult.State.DONE, pipelineResult.getState());
    try (Driver driver = Neo4jTestUtil.getDriver(containerHostname, containerPort)) {
        try (Session session = Neo4jTestUtil.getSession(driver, true)) {
            List<Integer> values = session.readTransaction(tx -> {
                List<Integer> v = null;
                int nrRows = 0;
                Result result = tx.run("MATCH(n:Something) RETURN count(n), min(n.id), max(n.id)");
                while (result.hasNext()) {
                    Record record = result.next();
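                    // The fourth element (++nrRows) counts the rows returned by the
                    // aggregate query; the assertion below expects exactly one row.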
                    v = Arrays.asList(record.get(0).asInt(), record.get(1).asInt(), record.get(2).asInt(), ++nrRows);
                }
                return v;
            });
            Assert.assertNotNull(values);
            assertThat(values, contains(endId - startId, startId, endId - 1, 1));
        }
    }
}
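The single read-back transaction checks all three aggregates at once: endId - startId = 1000 nodes created, a minimum id of 5000, a maximum id of 5999, and, via the ++nrRows element, that the aggregate query returned exactly one row.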

Aggregations

SerializableFunction (org.apache.beam.sdk.transforms.SerializableFunction): 37
Test (org.junit.Test): 27
TestPipeline (org.apache.beam.sdk.testing.TestPipeline): 23
PCollection (org.apache.beam.sdk.values.PCollection): 22
PAssert (org.apache.beam.sdk.testing.PAssert): 20
Instant (org.joda.time.Instant): 17
Rule (org.junit.Rule): 17
List (java.util.List): 16
MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat): 16
RunWith (org.junit.runner.RunWith): 16
Map (java.util.Map): 15
Duration (org.joda.time.Duration): 14
JUnit4 (org.junit.runners.JUnit4): 13
ArrayList (java.util.ArrayList): 12
Collections (java.util.Collections): 12
Create (org.apache.beam.sdk.transforms.Create): 12
Arrays (java.util.Arrays): 11
ParDo (org.apache.beam.sdk.transforms.ParDo): 11
KV (org.apache.beam.sdk.values.KV): 11
Assert.assertEquals (org.junit.Assert.assertEquals): 10