Example usage of org.apache.beam.sdk.schemas.transforms.Select in the Apache Beam project: class JdbcIOTest, method testReadRowsWithNumericFieldsWithExcessPrecision.
@Test
public void testReadRowsWithNumericFieldsWithExcessPrecision() {
  // Read a literal cast to NUMERIC(10, 2): the declared precision (10) exceeds
  // what the value "1" actually needs, exercising the excess-precision path.
  PCollection<Row> rows =
      pipeline.apply(
          JdbcIO.readRows()
              .withDataSourceConfiguration(DATA_SOURCE_CONFIGURATION)
              .withQuery(
                  String.format(
                      "SELECT CAST(1 AS NUMERIC(10, 2)) AS T1 FROM %s WHERE name = ?",
                      READ_TABLE_NAME))
              .withStatementPreparator(stmt -> stmt.setString(1, TestRow.getNameForSeed(1))));

  // Schema inference should map NUMERIC(10, 2) to the FixedPrecisionNumeric logical type.
  Schema expectedSchema =
      Schema.of(
          Schema.Field.of(
              "T1",
              FieldType.logicalType(FixedPrecisionNumeric.of(NUMERIC.getName(), 10, 2))
                  .withNullable(false)));
  assertEquals(expectedSchema, rows.getSchema());

  // Selecting by field name verifies the Row is usable with schema transforms;
  // the value round-trips at scale 2 (1 -> 1.00).
  PCollection<Row> selected = rows.apply(Select.fieldNames("T1"));
  PAssert.that(selected)
      .containsInAnyOrder(
          ImmutableList.of(
              Row.withSchema(expectedSchema)
                  .addValues(BigDecimal.valueOf(1).setScale(2, RoundingMode.HALF_UP))
                  .build()));
  pipeline.run();
}
Example usage of org.apache.beam.sdk.schemas.transforms.Select in the Apache Beam project: class JdbcIOTest, method testWriteWithoutPreparedStatementWithReadRows.
@Test
public void testWriteWithoutPreparedStatementWithReadRows() throws Exception {
  // Supply the DataSource through a provider function rather than a configuration object.
  SerializableFunction<Void, DataSource> provider = ignored -> DATA_SOURCE;

  PCollection<Row> rows =
      pipeline.apply(
          JdbcIO.readRows()
              .withDataSourceProviderFn(provider)
              .withQuery(String.format("select name,id from %s where name = ?", READ_TABLE_NAME))
              .withStatementPreparator(stmt -> stmt.setString(1, TestRow.getNameForSeed(1))));

  // Write the schema'd rows into a scratch table without supplying an explicit
  // prepared statement; JdbcIO derives it from the Row schema and the table name.
  String writeTableName = DatabaseTestHelper.getTestTableName("UT_WRITE_PS_WITH_READ_ROWS");
  DatabaseTestHelper.createTable(DATA_SOURCE, writeTableName);
  try {
    rows.apply(
        JdbcIO.<Row>write()
            .withDataSourceConfiguration(DATA_SOURCE_CONFIGURATION)
            .withBatchSize(10L)
            .withTable(writeTableName));
    pipeline.run();
  } finally {
    // Always drop the scratch table, even if the pipeline fails.
    DatabaseTestHelper.deleteTable(DATA_SOURCE, writeTableName);
  }
}
Example usage of org.apache.beam.sdk.schemas.transforms.Select in the Apache Beam project: class JdbcIOTest, method testReadWithSchema.
@Test
public void testReadWithSchema() {
  SerializableFunction<Void, DataSource> provider = ignored -> DATA_SOURCE;

  // Map each ResultSet row to a JavaBean; registering the bean with the schema
  // registry lets Beam infer a Row schema for the output PCollection.
  JdbcIO.RowMapper<RowWithSchema> mapper =
      rs -> new RowWithSchema(rs.getString("NAME"), rs.getInt("ID"));
  pipeline.getSchemaRegistry().registerJavaBean(RowWithSchema.class);

  PCollection<RowWithSchema> rows =
      pipeline.apply(
          JdbcIO.<RowWithSchema>read()
              .withDataSourceProviderFn(provider)
              .withQuery(String.format("select name,id from %s where name = ?", READ_TABLE_NAME))
              .withRowMapper(mapper)
              .withCoder(SerializableCoder.of(RowWithSchema.class))
              .withStatementPreparator(stmt -> stmt.setString(1, TestRow.getNameForSeed(1))));

  // Field names/types come from the registered JavaBean, not the SQL metadata.
  Schema expectedSchema =
      Schema.of(
          Schema.Field.of("name", Schema.FieldType.STRING),
          Schema.Field.of("id", Schema.FieldType.INT32));
  assertEquals(expectedSchema, rows.getSchema());

  PCollection<Row> selected = rows.apply(Select.fieldNames("name", "id"));
  PAssert.that(selected)
      .containsInAnyOrder(
          ImmutableList.of(Row.withSchema(expectedSchema).addValues("Testval1", 1).build()));
  pipeline.run();
}
Example usage of org.apache.beam.sdk.schemas.transforms.Select in the Apache Beam project: class JdbcIOTest, method testReadRowsWithDataSourceConfiguration.
@Test
public void testReadRowsWithDataSourceConfiguration() {
  PCollection<Row> rows =
      pipeline.apply(
          JdbcIO.readRows()
              .withDataSourceConfiguration(DATA_SOURCE_CONFIGURATION)
              .withQuery(String.format("select name,id from %s where name = ?", READ_TABLE_NAME))
              .withStatementPreparator(stmt -> stmt.setString(1, TestRow.getNameForSeed(1))));

  // readRows() infers the schema from JDBC metadata: VARCHAR(500) becomes a
  // variable-length-string logical type, and both columns are nullable.
  Schema expectedSchema =
      Schema.of(
          Schema.Field.of("NAME", LogicalTypes.variableLengthString(JDBCType.VARCHAR, 500))
              .withNullable(true),
          Schema.Field.of("ID", Schema.FieldType.INT32).withNullable(true));
  assertEquals(expectedSchema, rows.getSchema());

  PCollection<Row> selected = rows.apply(Select.fieldNames("NAME", "ID"));
  PAssert.that(selected)
      .containsInAnyOrder(
          ImmutableList.of(Row.withSchema(expectedSchema).addValues("Testval1", 1).build()));
  pipeline.run();
}
Example usage of org.apache.beam.sdk.schemas.transforms.Select in the Apache Beam project: class JdbcIOTest, method testReadRowsWithNumericFields.
@Test
public void testReadRowsWithNumericFields() {
  // Read a literal cast to NUMERIC(1, 0): precision exactly fits the value.
  PCollection<Row> rows =
      pipeline.apply(
          JdbcIO.readRows()
              .withDataSourceConfiguration(DATA_SOURCE_CONFIGURATION)
              .withQuery(
                  String.format(
                      "SELECT CAST(1 AS NUMERIC(1, 0)) AS T1 FROM %s WHERE name = ?",
                      READ_TABLE_NAME))
              .withStatementPreparator(stmt -> stmt.setString(1, TestRow.getNameForSeed(1))));

  // NUMERIC(1, 0) should map to the FixedPrecisionNumeric logical type.
  Schema expectedSchema =
      Schema.of(
          Schema.Field.of(
              "T1",
              FieldType.logicalType(FixedPrecisionNumeric.of(NUMERIC.getName(), 1, 0))
                  .withNullable(false)));
  assertEquals(expectedSchema, rows.getSchema());

  PCollection<Row> selected = rows.apply(Select.fieldNames("T1"));
  PAssert.that(selected)
      .containsInAnyOrder(
          ImmutableList.of(
              Row.withSchema(expectedSchema).addValues(BigDecimal.valueOf(1)).build()));
  pipeline.run();
}
Aggregations