
Example 16 with TableConfig

Use of org.apache.flink.table.api.TableConfig in project flink by apache.

From the class DataStreamJavaITCase, method testToDataStreamCustomEventTime.

@Test
public void testToDataStreamCustomEventTime() throws Exception {
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    final TableConfig tableConfig = tableEnv.getConfig();
    // session time zone should not have an impact on the conversion
    final ZoneId originalZone = tableConfig.getLocalTimeZone();
    tableConfig.setLocalTimeZone(ZoneId.of("Europe/Berlin"));
    final LocalDateTime localDateTime1 = LocalDateTime.parse("1970-01-01T00:00:00.000");
    final LocalDateTime localDateTime2 = LocalDateTime.parse("1970-01-01T01:00:00.000");
    final DataStream<Tuple2<LocalDateTime, String>> dataStream = env.fromElements(
            new Tuple2<>(localDateTime1, "alice"), new Tuple2<>(localDateTime2, "bob"));
    final Table table = tableEnv.fromDataStream(
            dataStream,
            Schema.newBuilder()
                    .column("f0", "TIMESTAMP(3)")
                    .column("f1", "STRING")
                    .watermark("f0", "SOURCE_WATERMARK()")
                    .build());
    testSchema(
            table,
            new ResolvedSchema(
                    Arrays.asList(Column.physical("f0", TIMESTAMP(3)), Column.physical("f1", STRING())),
                    Collections.singletonList(WatermarkSpec.of(
                            "f0", ResolvedExpressionMock.of(TIMESTAMP(3), "`SOURCE_WATERMARK`()"))),
                    null));
    final DataStream<Long> rowtimeStream =
            tableEnv.toDataStream(table).process(new ProcessFunction<Row, Long>() {

        @Override
        public void processElement(Row value, Context ctx, Collector<Long> out) {
            out.collect(ctx.timestamp());
        }
    });
    testResult(
            rowtimeStream,
            localDateTime1.atOffset(ZoneOffset.UTC).toInstant().toEpochMilli(),
            localDateTime2.atOffset(ZoneOffset.UTC).toInstant().toEpochMilli());
    tableConfig.setLocalTimeZone(originalZone);
}
Also used: LocalDateTime(java.time.LocalDateTime) Table(org.apache.flink.table.api.Table) ZoneId(java.time.ZoneId) Tuple2(org.apache.flink.api.java.tuple.Tuple2) TableConfig(org.apache.flink.table.api.TableConfig) StreamTableEnvironment(org.apache.flink.table.api.bridge.java.StreamTableEnvironment) Row(org.apache.flink.types.Row) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) Test(org.junit.Test)
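
A detail worth copying from this test is the save-and-restore around the session time zone: the Berlin override cannot leak into other tests, and the asserted rowtimes confirm that the zone has no effect on how TIMESTAMP(3) values become stream timestamps. A minimal sketch of that pattern, with a try/finally added for safety (the helper name and Runnable shape are illustrative, not from the Flink test):

import java.time.ZoneId;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.TableEnvironment;

// Illustrative helper: run an action under a temporary session time zone
// and always restore the previous zone, even if the action throws.
static void withSessionTimeZone(TableEnvironment tableEnv, ZoneId zone, Runnable action) {
    TableConfig config = tableEnv.getConfig();
    ZoneId original = config.getLocalTimeZone();
    config.setLocalTimeZone(zone);
    try {
        action.run();
    } finally {
        config.setLocalTimeZone(original);
    }
}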

Example 17 with TableConfig

Use of org.apache.flink.table.api.TableConfig in project flink by apache.

From the class GroupingSetsITCase, method setupTables.

@Before
public void setupTables() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    tableEnv = TableEnvironment.getTableEnvironment(env, new TableConfig());
    DataSet<Tuple3<Integer, Long, String>> dataSet = CollectionDataSets.get3TupleDataSet(env);
    tableEnv.registerDataSet(TABLE_NAME, dataSet);
    MapOperator<Tuple3<Integer, Long, String>, Tuple3<Integer, Long, String>> dataSetWithNulls =
            dataSet.map(new MapFunction<Tuple3<Integer, Long, String>, Tuple3<Integer, Long, String>>() {

        @Override
        public Tuple3<Integer, Long, String> map(Tuple3<Integer, Long, String> value) throws Exception {
            if (value.f2.toLowerCase().contains("world")) {
                value.f2 = null;
            }
            return value;
        }
    });
    tableEnv.registerDataSet(TABLE_WITH_NULLS_NAME, dataSetWithNulls);
}
Also used: ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) Tuple3(org.apache.flink.api.java.tuple.Tuple3) TableConfig(org.apache.flink.table.api.TableConfig) Before(org.junit.Before)
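
TableEnvironment.getTableEnvironment and registerDataSet come from the legacy DataSet-based Table API and were removed in later Flink releases. As a hedged sketch only, the closest modern batch setup (assuming Flink 1.11 or newer) goes through EnvironmentSettings, with the TableConfig obtained from the environment rather than passed into it:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.TableEnvironment;

// Sketch of the modern batch setup; there is no TableConfig constructor
// argument anymore, the environment owns its config.
EnvironmentSettings settings = EnvironmentSettings.newInstance().inBatchMode().build();
TableEnvironment tableEnv = TableEnvironment.create(settings);
TableConfig tableConfig = tableEnv.getConfig();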

Example 18 with TableConfig

Use of org.apache.flink.table.api.TableConfig in project flink by apache.

From the class HBaseConnectorITCase, method testTableSourceFullScan.

// ######## HBaseTableSource tests ############
@Test
public void testTableSourceFullScan() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(4);
    BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, new TableConfig());
    HBaseTableSource hbaseTable = new HBaseTableSource(getConf(), TEST_TABLE);
    hbaseTable.addColumn(FAMILY1, F1COL1, Integer.class);
    hbaseTable.addColumn(FAMILY2, F2COL1, String.class);
    hbaseTable.addColumn(FAMILY2, F2COL2, Long.class);
    hbaseTable.addColumn(FAMILY3, F3COL1, Double.class);
    hbaseTable.addColumn(FAMILY3, F3COL2, Boolean.class);
    hbaseTable.addColumn(FAMILY3, F3COL3, String.class);
    tableEnv.registerTableSource("hTable", hbaseTable);
    Table result = tableEnv.sql(
            "SELECT "
                    + "  h.family1.col1, "
                    + "  h.family2.col1, "
                    + "  h.family2.col2, "
                    + "  h.family3.col1, "
                    + "  h.family3.col2, "
                    + "  h.family3.col3 "
                    + "FROM hTable AS h");
    DataSet<Row> resultSet = tableEnv.toDataSet(result, Row.class);
    List<Row> results = resultSet.collect();
    String expected =
            "10,Hello-1,100,1.01,false,Welt-1\n"
                    + "20,Hello-2,200,2.02,true,Welt-2\n"
                    + "30,Hello-3,300,3.03,false,Welt-3\n"
                    + "40,null,400,4.04,true,Welt-4\n"
                    + "50,Hello-5,500,5.05,false,Welt-5\n"
                    + "60,Hello-6,600,6.06,true,Welt-6\n"
                    + "70,Hello-7,700,7.07,false,Welt-7\n"
                    + "80,null,800,8.08,true,Welt-8\n";
    TestBaseUtils.compareResultAsText(results, expected);
}
Also used: ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) Table(org.apache.flink.table.api.Table) HTable(org.apache.hadoop.hbase.client.HTable) TableConfig(org.apache.flink.table.api.TableConfig) Row(org.apache.flink.types.Row) BatchTableEnvironment(org.apache.flink.table.api.java.BatchTableEnvironment) Test(org.junit.Test)
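
A version note: tableEnv.sql was deprecated in favor of sqlQuery in later Flink releases, so on newer versions (an assumption; this test predates the rename) the same full scan reads as below. The nested h.familyN.colN paths exist because each addColumn call adds a field to a ROW-typed column named after its column family:

// Same query via the renamed API; behavior is unchanged.
Table result = tableEnv.sqlQuery(
        "SELECT h.family1.col1, h.family2.col1, h.family2.col2, "
                + "h.family3.col1, h.family3.col2, h.family3.col3 "
                + "FROM hTable AS h");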

Example 19 with TableConfig

Use of org.apache.flink.table.api.TableConfig in project flink by apache.

From the class HBaseConnectorITCase, method testTableSourceProjection.

@Test
public void testTableSourceProjection() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(4);
    BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, new TableConfig());
    HBaseTableSource hbaseTable = new HBaseTableSource(getConf(), TEST_TABLE);
    hbaseTable.addColumn(FAMILY1, F1COL1, Integer.class);
    hbaseTable.addColumn(FAMILY2, F2COL1, String.class);
    hbaseTable.addColumn(FAMILY2, F2COL2, Long.class);
    hbaseTable.addColumn(FAMILY3, F3COL1, Double.class);
    hbaseTable.addColumn(FAMILY3, F3COL2, Boolean.class);
    hbaseTable.addColumn(FAMILY3, F3COL3, String.class);
    tableEnv.registerTableSource("hTable", hbaseTable);
    Table result = tableEnv.sql(
            "SELECT "
                    + "  h.family1.col1, "
                    + "  h.family3.col1, "
                    + "  h.family3.col2, "
                    + "  h.family3.col3 "
                    + "FROM hTable AS h");
    DataSet<Row> resultSet = tableEnv.toDataSet(result, Row.class);
    List<Row> results = resultSet.collect();
    String expected =
            "10,1.01,false,Welt-1\n"
                    + "20,2.02,true,Welt-2\n"
                    + "30,3.03,false,Welt-3\n"
                    + "40,4.04,true,Welt-4\n"
                    + "50,5.05,false,Welt-5\n"
                    + "60,6.06,true,Welt-6\n"
                    + "70,7.07,false,Welt-7\n"
                    + "80,8.08,true,Welt-8\n";
    TestBaseUtils.compareResultAsText(results, expected);
}
Also used: ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) Table(org.apache.flink.table.api.Table) HTable(org.apache.hadoop.hbase.client.HTable) TableConfig(org.apache.flink.table.api.TableConfig) Row(org.apache.flink.types.Row) BatchTableEnvironment(org.apache.flink.table.api.java.BatchTableEnvironment) Test(org.junit.Test)

Example 20 with TableConfig

Use of org.apache.flink.table.api.TableConfig in project flink by apache.

From the class HBaseConnectorITCase, method testTableSourceReadAsByteArray.

@Test
public void testTableSourceReadAsByteArray() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(4);
    BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, new TableConfig());
    // fetch row2 from the table till the end
    HBaseTableSource hbaseTable = new HBaseTableSource(getConf(), TEST_TABLE);
    hbaseTable.addColumn(FAMILY2, F2COL1, byte[].class);
    hbaseTable.addColumn(FAMILY2, F2COL2, byte[].class);
    tableEnv.registerTableSource("hTable", hbaseTable);
    tableEnv.registerFunction("toUTF8", new ToUTF8());
    tableEnv.registerFunction("toLong", new ToLong());
    Table result = tableEnv.sql(
            "SELECT "
                    + "  toUTF8(h.family2.col1), "
                    + "  toLong(h.family2.col2) "
                    + "FROM hTable AS h");
    DataSet<Row> resultSet = tableEnv.toDataSet(result, Row.class);
    List<Row> results = resultSet.collect();
    String expected =
            "Hello-1,100\n"
                    + "Hello-2,200\n"
                    + "Hello-3,300\n"
                    + "null,400\n"
                    + "Hello-5,500\n"
                    + "Hello-6,600\n"
                    + "Hello-7,700\n"
                    + "null,800\n";
    TestBaseUtils.compareResultAsText(results, expected);
}
Also used: ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) Table(org.apache.flink.table.api.Table) HTable(org.apache.hadoop.hbase.client.HTable) TableConfig(org.apache.flink.table.api.TableConfig) Row(org.apache.flink.types.Row) BatchTableEnvironment(org.apache.flink.table.api.java.BatchTableEnvironment) Test(org.junit.Test)
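
The ToUTF8 and ToLong functions are registered above but not defined in this snippet. A plausible sketch of such scalar functions, assuming the raw bytes hold UTF-8 text and big-endian longs (HBase's default encodings); the eval bodies are illustrative and not copied from the Flink sources:

import java.nio.charset.StandardCharsets;
import org.apache.flink.table.functions.ScalarFunction;
import org.apache.hadoop.hbase.util.Bytes;

// Decodes a raw HBase value as a UTF-8 string; null bytes stay null,
// which prints as "null" in the expected result above.
public static class ToUTF8 extends ScalarFunction {
    public String eval(byte[] bytes) {
        return bytes == null ? null : new String(bytes, StandardCharsets.UTF_8);
    }
}

// Decodes a raw HBase value as a big-endian long.
public static class ToLong extends ScalarFunction {
    public Long eval(byte[] bytes) {
        return bytes == null ? null : Bytes.toLong(bytes);
    }
}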

Aggregations

TableConfig (org.apache.flink.table.api.TableConfig): 41
RowType (org.apache.flink.table.types.logical.RowType): 19
Test (org.junit.Test): 10
ArrayList (java.util.ArrayList): 6
RexNode (org.apache.calcite.rex.RexNode): 6
CatalogManager (org.apache.flink.table.catalog.CatalogManager): 6
IOException (java.io.IOException): 5
OutputStream (java.io.OutputStream): 5
PrintStream (java.io.PrintStream): 5
Arrays (java.util.Arrays): 5
Collections (java.util.Collections): 5
List (java.util.List): 5
RexBuilder (org.apache.calcite.rex.RexBuilder): 5
RexInputRef (org.apache.calcite.rex.RexInputRef): 5
Table (org.apache.flink.table.api.Table): 5
FlinkTypeFactory (org.apache.flink.table.planner.calcite.FlinkTypeFactory): 5
IntType (org.apache.flink.table.types.logical.IntType): 5
Row (org.apache.flink.types.Row): 5
Random (java.util.Random): 4
Consumer (java.util.function.Consumer): 4