Example usage of org.apache.beam.sdk.values.Row in the Apache Beam project.
From class JdbcIOTest, method testWriteWithoutPreparedStatementWithReadRows.
@Test
public void testWriteWithoutPreparedStatementWithReadRows() throws Exception {
  // Read (name, id) rows for the single seeded test row from the read table.
  SerializableFunction<Void, DataSource> dataSourceProvider = ignored -> DATA_SOURCE;
  String query = String.format("select name,id from %s where name = ?", READ_TABLE_NAME);
  PCollection<Row> rows =
      pipeline.apply(
          JdbcIO.readRows()
              .withDataSourceProviderFn(dataSourceProvider)
              .withQuery(query)
              .withStatementPreparator(
                  preparedStatement -> preparedStatement.setString(1, TestRow.getNameForSeed(1))));

  String writeTableName = DatabaseTestHelper.getTestTableName("UT_WRITE_PS_WITH_READ_ROWS");
  DatabaseTestHelper.createTable(DATA_SOURCE, writeTableName);
  try {
    // Write the schema'd rows without an explicit statement: the sink derives
    // the insert statement from the target table.
    rows.apply(
        JdbcIO.<Row>write()
            .withDataSourceConfiguration(DATA_SOURCE_CONFIGURATION)
            .withBatchSize(10L)
            .withTable(writeTableName));
    pipeline.run();
  } finally {
    // Always drop the scratch table, even if the pipeline fails.
    DatabaseTestHelper.deleteTable(DATA_SOURCE, writeTableName);
  }
}
Example usage of org.apache.beam.sdk.values.Row in the Apache Beam project.
From class JdbcIOTest, method getRowsToWrite.
/**
 * Builds {@code rowsToAdd} dummy {@link Row}s conforming to {@code schema}, with each field
 * populated by {@code dummyFieldValue} for its type.
 *
 * @param rowsToAdd number of rows to generate
 * @param schema the Beam schema every generated row conforms to
 * @return a list containing one generated row per requested count
 */
private static ArrayList<Row> getRowsToWrite(long rowsToAdd, Schema schema) {
  // Presize the list since the count is known up front (capped at the int range,
  // which is also the practical capacity limit of ArrayList).
  ArrayList<Row> data = new ArrayList<>((int) Math.min(rowsToAdd, Integer.MAX_VALUE));
  // Use a long counter to match the parameter type; the original int counter
  // would overflow and loop forever for counts above Integer.MAX_VALUE.
  for (long i = 0; i < rowsToAdd; i++) {
    Row row =
        schema.getFields().stream()
            .map(field -> dummyFieldValue(field.getType()))
            .collect(Row.toRow(schema));
    data.add(row);
  }
  return data;
}
Example usage of org.apache.beam.sdk.values.Row in the Apache Beam project.
From class JdbcIOTest, method testWriteWithoutPreparedStatement.
@Test
public void testWriteWithoutPreparedStatement() throws Exception {
  final int rowsToAdd = 10;

  // Beam schema mirroring the table created below — one field per column,
  // covering primitive, logical, and binary/large-object types.
  Schema schema =
      Schema.builder()
          .addField(Schema.Field.of("column_boolean", Schema.FieldType.BOOLEAN))
          .addField(Schema.Field.of("column_string", Schema.FieldType.STRING))
          .addField(Schema.Field.of("column_int", Schema.FieldType.INT32))
          .addField(Schema.Field.of("column_long", Schema.FieldType.INT64))
          .addField(Schema.Field.of("column_float", Schema.FieldType.FLOAT))
          .addField(Schema.Field.of("column_double", Schema.FieldType.DOUBLE))
          .addField(Schema.Field.of("column_bigdecimal", Schema.FieldType.DECIMAL))
          .addField(Schema.Field.of("column_date", LogicalTypes.JDBC_DATE_TYPE))
          .addField(Schema.Field.of("column_time", LogicalTypes.JDBC_TIME_TYPE))
          .addField(
              Schema.Field.of("column_timestamptz", LogicalTypes.JDBC_TIMESTAMP_WITH_TIMEZONE_TYPE))
          .addField(Schema.Field.of("column_timestamp", Schema.FieldType.DATETIME))
          .addField(Schema.Field.of("column_short", Schema.FieldType.INT16))
          .addField(Schema.Field.of("column_blob", FieldType.BYTES))
          .addField(Schema.Field.of("column_clob", FieldType.STRING))
          .addField(Schema.Field.of("column_uuid", LogicalTypes.JDBC_UUID_TYPE))
          .build();

  // Register a UUID user-defined type in the database so the UUID column below
  // can be created.
  try (Connection connection = DATA_SOURCE.getConnection();
      Statement statement = connection.createStatement()) {
    statement.execute("CREATE TYPE UUID EXTERNAL NAME 'java.util.UUID' LANGUAGE JAVA");
  }

  String tableName = DatabaseTestHelper.getTestTableName("UT_WRITE_PS");
  // Column DDL in schema order; joined with commas to form the exact same
  // statement the imperative StringBuilder version produced.
  String columnDefinitions =
      String.join(
          ",",
          "column_boolean BOOLEAN", // boolean
          "column_string VARCHAR(254)", // String
          "column_int INTEGER", // int
          "column_long BIGINT", // long
          "column_float REAL", // float
          "column_double DOUBLE PRECISION", // double
          "column_bigdecimal DECIMAL(13,0)", // BigDecimal
          "column_date DATE", // Date
          "column_time TIME", // Time
          "column_timestamptz TIMESTAMP", // Timestamp with time zone
          "column_timestamp TIMESTAMP", // Timestamp
          "column_short SMALLINT", // short
          "column_blob BLOB", // blob
          "column_clob CLOB", // clob
          "column_uuid UUID"); // uuid
  DatabaseTestHelper.createTableWithStatement(
      DATA_SOURCE, "CREATE TABLE " + tableName + " (" + columnDefinitions + " )");

  try {
    ArrayList<Row> data = getRowsToWrite(rowsToAdd, schema);
    // Write without a prepared statement: the sink infers the insert statement
    // from the target table and the rows' schema.
    pipeline
        .apply(Create.of(data))
        .setRowSchema(schema)
        .apply(
            JdbcIO.<Row>write()
                .withDataSourceConfiguration(DATA_SOURCE_CONFIGURATION)
                .withBatchSize(10L)
                .withTable(tableName));
    pipeline.run();
    assertRowCount(DATA_SOURCE, tableName, rowsToAdd);
  } finally {
    // Always drop the scratch table, even if the pipeline or assertion fails.
    DatabaseTestHelper.deleteTable(DATA_SOURCE, tableName);
  }
}
Example usage of org.apache.beam.sdk.values.Row in the Apache Beam project.
From class JdbcIOTest, method testGetPreparedStatementSetCallerForArray.
@Test
public void testGetPreparedStatementSetCallerForArray() throws Exception {
  // Single-field schema holding an array of strings.
  Schema.FieldType stringArrayType = Schema.FieldType.array(Schema.FieldType.STRING);
  Schema schema = Schema.builder().addField("string_array_col", stringArrayType).build();
  Row row = Row.withSchema(schema).addValues(Arrays.asList("string 1", "string 2")).build();

  // Mock the JDBC plumbing: the prepared statement hands back a connection,
  // and the connection materializes a java.sql.Array from the field values.
  PreparedStatement psMocked = mock(PreparedStatement.class);
  Connection connectionMocked = mock(Connection.class);
  Array arrayMocked = mock(Array.class);
  when(psMocked.getConnection()).thenReturn(connectionMocked);
  when(connectionMocked.createArrayOf(anyString(), any())).thenReturn(arrayMocked);

  JdbcUtil.getPreparedStatementSetCaller(stringArrayType)
      .set(row, psMocked, 0, SchemaUtil.FieldWithIndex.of(schema.getField(0), 0));

  // Field index 0 maps to JDBC parameter index 1 (JDBC parameters are 1-based).
  verify(psMocked, times(1)).setArray(1, arrayMocked);
}
Example usage of org.apache.beam.sdk.values.Row in the Apache Beam project.
From class JdbcIOTest, method testReadWithSchema.
@Test
public void testReadWithSchema() {
  SerializableFunction<Void, DataSource> dataSourceProvider = ignored -> DATA_SOURCE;
  JdbcIO.RowMapper<RowWithSchema> rowMapper =
      rs -> new RowWithSchema(rs.getString("NAME"), rs.getInt("ID"));
  // Register the bean so Beam can infer a schema for the output PCollection.
  pipeline.getSchemaRegistry().registerJavaBean(RowWithSchema.class);

  String query = String.format("select name,id from %s where name = ?", READ_TABLE_NAME);
  PCollection<RowWithSchema> rows =
      pipeline.apply(
          JdbcIO.<RowWithSchema>read()
              .withDataSourceProviderFn(dataSourceProvider)
              .withQuery(query)
              .withRowMapper(rowMapper)
              .withCoder(SerializableCoder.of(RowWithSchema.class))
              .withStatementPreparator(
                  preparedStatement -> preparedStatement.setString(1, TestRow.getNameForSeed(1))));

  // The registered JavaBean schema should surface on the PCollection.
  Schema expectedSchema =
      Schema.of(
          Schema.Field.of("name", Schema.FieldType.STRING),
          Schema.Field.of("id", Schema.FieldType.INT32));
  assertEquals(expectedSchema, rows.getSchema());

  // Selecting by field name exercises the schema-aware transform path.
  PCollection<Row> output = rows.apply(Select.fieldNames("name", "id"));
  PAssert.that(output)
      .containsInAnyOrder(
          ImmutableList.of(Row.withSchema(expectedSchema).addValues("Testval1", 1).build()));
  pipeline.run();
}
Aggregations