Example usage of org.apache.beam.sdk.values.Row from the Apache Beam project:
class BeamSalUhfSpecialTypeAndValueTest, method testSHA512.
@Test
public void testSHA512() throws Exception {
  // Single-column schema matching the output of the SQL SHA512 function.
  Schema resultType = Schema.builder().addByteArrayField("field").build();

  // Expected rows: SHA-512 digests computed locally with DigestUtils for each
  // input string that the filtered PCOLLECTION is expected to contain.
  Row expectedFoobar =
      Row.withSchema(resultType).addValues(DigestUtils.sha512("foobar".getBytes(UTF_8))).build();
  Row expectedSpace =
      Row.withSchema(resultType).addValues(DigestUtils.sha512(" ".getBytes(UTF_8))).build();
  Row expectedMixed =
      Row.withSchema(resultType)
          .addValues(DigestUtils.sha512("abcABCжщфЖЩФ".getBytes(UTF_8)))
          .build();

  // Apply SHA512 via SQL and compare against the locally computed digests.
  String sql = "SELECT SHA512(f_bytes) FROM PCOLLECTION WHERE f_func = 'HashingFn'";
  PCollection<Row> result = boundedInputBytes.apply("testUdf", SqlTransform.query(sql));
  PAssert.that(result).containsInAnyOrder(expectedFoobar, expectedSpace, expectedMixed);
  pipeline.run().waitUntilFinish();
}
Example usage of org.apache.beam.sdk.values.Row from the Apache Beam project:
class BeamSalUhfSpecialTypeAndValueTest, method testReverse.
@Test
public void testReverse() throws Exception {
  // Build the expected reversed bytes locally: reverse the UTF-8 encoding of a
  // Cyrillic string in place. (Fixed misspelled local "testByets" -> "testBytes".)
  byte[] testBytes = "абвгд".getBytes(UTF_8);
  ArrayUtils.reverse(testBytes);

  // Single-column schema matching the output of the SQL REVERSE function.
  Schema resultType = Schema.builder().addByteArrayField("field").build();

  // Expected rows: the multi-byte reversal plus two edge cases — a two-byte
  // control sequence and the empty byte array.
  Row resultRow = Row.withSchema(resultType).addValues(testBytes).build();
  Row resultRow2 = Row.withSchema(resultType).addValues("\1\0".getBytes(UTF_8)).build();
  Row resultRow3 = Row.withSchema(resultType).addValues("".getBytes(UTF_8)).build();

  // Apply REVERSE via SQL and compare with the locally reversed expectations.
  String sql = "SELECT REVERSE(f_bytes) FROM PCOLLECTION WHERE f_func = 'LENGTH'";
  PCollection<Row> result = boundedInputBytes.apply("testUdf", SqlTransform.query(sql));
  PAssert.that(result).containsInAnyOrder(resultRow, resultRow2, resultRow3);
  pipeline.run().waitUntilFinish();
}
Example usage of org.apache.beam.sdk.values.Row from the Apache Beam project:
class BeamCalcRelTest, method testNoFieldAccess.
@Test
public void testNoFieldAccess() throws IllegalAccessException {
  // A constant projection reads no columns, so the generated Calc DoFn should
  // declare an empty field-access descriptor.
  String sql = "SELECT 1 FROM ORDER_DETAILS_BOUNDED";
  PCollection<Row> rows = compilePipeline(sql, pipeline);

  // Walk the pipeline graph to locate the ParDo produced for the Calc node.
  final NodeGetter visitor = new NodeGetter(rows);
  pipeline.traverseTopologically(visitor);
  ParDo.MultiOutput<Row, Row> pardo =
      (ParDo.MultiOutput<Row, Row>) visitor.producer.getTransform();
  PCollection<Row> input =
      (PCollection<Row>) Iterables.getOnlyElement(visitor.producer.getInputs().values());

  // Inspect the schema information Beam derived for the DoFn: no wildcard
  // access, no direct fields, no nested fields.
  DoFnSchemaInformation info = ParDo.getDoFnSchemaInformation(pardo.getFn(), input);
  FieldAccessDescriptor fieldAccess = info.getFieldAccessDescriptor();
  Assert.assertFalse(fieldAccess.getAllFields());
  Assert.assertTrue(fieldAccess.getFieldsAccessed().isEmpty());
  Assert.assertTrue(fieldAccess.getNestedFieldsAccessed().isEmpty());

  pipeline.run().waitUntilFinish();
}
Example usage of org.apache.beam.sdk.values.Row from the Apache Beam project:
class BeamMinusRelTest, method testExceptAll.
@Test
public void testExceptAll() {
  // EXCEPT ALL keeps duplicate rows, so the expected output contains the
  // (4L, 4, 4.0) row twice.
  String sql = "";
  sql +=
      "SELECT order_id, site_id, price "
          + "FROM ORDER_DETAILS1 "
          + " EXCEPT ALL "
          + "SELECT order_id, site_id, price "
          + "FROM ORDER_DETAILS2 ";
  PCollection<Row> rows = compilePipeline(sql, pipeline);
  PAssert.that(rows).satisfies(new CheckSize(3));
  // NOTE: new BigDecimal(double) is kept deliberately — 1.0 and 4.0 are exactly
  // representable, and switching to the String constructor would change the
  // scale and break Row equality.
  PAssert.that(rows)
      .containsInAnyOrder(
          TestUtils.RowsBuilder.of(
                  Schema.FieldType.INT64, "order_id",
                  Schema.FieldType.INT32, "site_id",
                  Schema.FieldType.DECIMAL, "price")
              .addRows(
                  1L, 1, new BigDecimal(1.0),
                  4L, 4, new BigDecimal(4.0),
                  4L, 4, new BigDecimal(4.0))
              .getRows());
  // Fixed: wait for completion so PAssert checks actually run before the test
  // method returns (matches the other tests in this suite; bare run() may
  // return before an asynchronous runner finishes).
  pipeline.run().waitUntilFinish();
}
Example usage of org.apache.beam.sdk.values.Row from the Apache Beam project:
class BeamSqlRowCoderTest, method encodeAndDecode.
@Test
public void encodeAndDecode() throws Exception {
  // Build a Calcite row type covering every SQL scalar type the coder must
  // round-trip, then convert it to a Beam schema.
  RelDataType relDataType =
      new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT)
          .builder()
          .add("col_tinyint", SqlTypeName.TINYINT)
          .add("col_smallint", SqlTypeName.SMALLINT)
          .add("col_integer", SqlTypeName.INTEGER)
          .add("col_bigint", SqlTypeName.BIGINT)
          .add("col_float", SqlTypeName.FLOAT)
          .add("col_double", SqlTypeName.DOUBLE)
          .add("col_decimal", SqlTypeName.DECIMAL)
          .add("col_string_varchar", SqlTypeName.VARCHAR)
          .add("col_time", SqlTypeName.TIME)
          .add("col_date", SqlTypeName.DATE)
          .add("col_timestamp_with_local_time_zone", SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE)
          .add("col_timestamp", SqlTypeName.TIMESTAMP)
          .add("col_boolean", SqlTypeName.BOOLEAN)
          .build();
  Schema beamSchema = CalciteUtils.toSchema(relDataType);

  // One sample value per column, in schema order.
  Row row =
      Row.withSchema(beamSchema)
          .addValues(
              Byte.valueOf("1"),
              Short.valueOf("1"),
              1,
              1L,
              1.1F,
              1.1,
              BigDecimal.ZERO,
              "hello",
              LocalTime.now(),
              LocalDate.now(),
              LocalDateTime.now(),
              DateTime.now().toInstant(),
              true)
          .build();

  // The schema coder must decode what it encodes back to an equal Row.
  Coder<Row> coder = SchemaCoder.of(beamSchema);
  CoderProperties.coderDecodeEncodeEqual(coder, row);
}
Aggregations: further Row examples covering aggregation usage are listed separately.