
Example 1 with BatchTableEnvironment

Use of org.apache.flink.table.api.java.BatchTableEnvironment in project flink by apache.

From the class HBaseConnectorITCase, method testTableSourceFieldOrder:

@Test
public void testTableSourceFieldOrder() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(4);
    BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, new TableConfig());
    HBaseTableSource hbaseTable = new HBaseTableSource(getConf(), TEST_TABLE);
    // shuffle order of column registration
    hbaseTable.addColumn(FAMILY2, F2COL1, String.class);
    hbaseTable.addColumn(FAMILY3, F3COL1, Double.class);
    hbaseTable.addColumn(FAMILY1, F1COL1, Integer.class);
    hbaseTable.addColumn(FAMILY2, F2COL2, Long.class);
    hbaseTable.addColumn(FAMILY3, F3COL2, Boolean.class);
    hbaseTable.addColumn(FAMILY3, F3COL3, String.class);
    tableEnv.registerTableSource("hTable", hbaseTable);
    Table result = tableEnv.sql("SELECT * FROM hTable AS h");
    DataSet<Row> resultSet = tableEnv.toDataSet(result, Row.class);
    List<Row> results = resultSet.collect();
    String expected = "Hello-1,100,1.01,false,Welt-1,10\n" + "Hello-2,200,2.02,true,Welt-2,20\n" + "Hello-3,300,3.03,false,Welt-3,30\n" + "null,400,4.04,true,Welt-4,40\n" + "Hello-5,500,5.05,false,Welt-5,50\n" + "Hello-6,600,6.06,true,Welt-6,60\n" + "Hello-7,700,7.07,false,Welt-7,70\n" + "null,800,8.08,true,Welt-8,80\n";
    TestBaseUtils.compareResultAsText(results, expected);
}
Also used : ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) Table(org.apache.flink.table.api.Table) HTable(org.apache.hadoop.hbase.client.HTable) TableConfig(org.apache.flink.table.api.TableConfig) Row(org.apache.flink.types.Row) BatchTableEnvironment(org.apache.flink.table.api.java.BatchTableEnvironment) Test(org.junit.Test)
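
For context, the sketch below shows how an HBaseTableSource might be wired up outside the test harness. The getConf(), TEST_TABLE and FAMILY*/F*COL* values above come from the test's base class, so the configuration, table name and column names here are placeholders, not values from the Flink sources.

// Minimal standalone sketch (placeholder table and column names), same old Flink Table API as above.
// HBaseConfiguration comes from org.apache.hadoop.hbase.HBaseConfiguration.
ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env);
org.apache.hadoop.conf.Configuration hbaseConf = HBaseConfiguration.create();
HBaseTableSource source = new HBaseTableSource(hbaseConf, "someTable");
// Columns are registered as (family, qualifier, type); the test above verifies that
// SELECT * returns the fields in exactly this registration order.
source.addColumn("family1", "col1", Integer.class);
source.addColumn("family2", "col1", String.class);
source.addColumn("family2", "col2", Long.class);
tableEnv.registerTableSource("hTable", source);
Table all = tableEnv.sql("SELECT * FROM hTable");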

Example 2 with BatchTableEnvironment

Use of org.apache.flink.table.api.java.BatchTableEnvironment in project flink by apache.

From the class WordCountTable, method main:

// *************************************************************************
//     PROGRAM
// *************************************************************************
public static void main(String[] args) throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();
    BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(env);
    DataSet<WC> input = env.fromElements(new WC("Hello", 1), new WC("Ciao", 1), new WC("Hello", 1));
    Table table = tEnv.fromDataSet(input);
    Table filtered = table.groupBy("word").select("word, frequency.sum as frequency").filter("frequency = 2");
    DataSet<WC> result = tEnv.toDataSet(filtered, WC.class);
    result.print();
}
Also used : ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) Table(org.apache.flink.table.api.Table) BatchTableEnvironment(org.apache.flink.table.api.java.BatchTableEnvironment)
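
The WC type used above is a simple POJO with public fields word and frequency. A minimal sketch is shown below; in the Flink example it is a nested public static class, and its exact toString() may differ.

// Sketch of the WC POJO assumed by the example above.
public static class WC {

    public String word;
    public long frequency;

    // Flink's POJO type extraction requires a public no-argument constructor.
    public WC() {}

    public WC(String word, long frequency) {
        this.word = word;
        this.frequency = frequency;
    }

    @Override
    public String toString() {
        return "WC " + word + " " + frequency;
    }
}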

Example 3 with BatchTableEnvironment

Use of org.apache.flink.table.api.java.BatchTableEnvironment in project flink by apache.

From the class TableEnvironmentITCase, method testAsFromPojo:

@Test
public void testAsFromPojo() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());
    List<SmallPojo> data = new ArrayList<>();
    data.add(new SmallPojo("Peter", 28, 4000.00, "Sales"));
    data.add(new SmallPojo("Anna", 56, 10000.00, "Engineering"));
    data.add(new SmallPojo("Lucy", 42, 6000.00, "HR"));
    Table table = tableEnv.fromDataSet(env.fromCollection(data), "department AS a, " + "age AS b, " + "salary AS c, " + "name AS d").select("a, b, c, d");
    DataSet<Row> ds = tableEnv.toDataSet(table, Row.class);
    List<Row> results = ds.collect();
    String expected = "Sales,28,4000.0,Peter\n" + "Engineering,56,10000.0,Anna\n" + "HR,42,6000.0,Lucy\n";
    compareResultAsText(results, expected);
}
Also used : ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) Table(org.apache.flink.table.api.Table) ArrayList(java.util.ArrayList) Row(org.apache.flink.types.Row) BatchTableEnvironment(org.apache.flink.table.api.java.BatchTableEnvironment) Test(org.junit.Test)
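
SmallPojo is a test POJO whose fields name, age, salary and department are renamed to a, b, c, d by the "AS" expressions above. A minimal sketch, assuming only the four fields used here (the actual test class may declare them in a different order or carry extra fields):

// Sketch of the SmallPojo input type used by the test above.
public static class SmallPojo {

    public String name;
    public int age;
    public double salary;
    public String department;

    public SmallPojo() {}

    public SmallPojo(String name, int age, double salary, String department) {
        this.name = name;
        this.age = age;
        this.salary = salary;
        this.department = department;
    }
}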

Example 4 with BatchTableEnvironment

Use of org.apache.flink.table.api.java.BatchTableEnvironment in project flink by apache.

From the class TableEnvironmentITCase, method testRegisterWithFields:

@Test
public void testRegisterWithFields() throws Exception {
    final String tableName = "MyTable";
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());
    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    tableEnv.registerDataSet(tableName, ds, "a, b, c");
    Table t = tableEnv.scan(tableName);
    Table result = t.select("a, b, c");
    DataSet<Row> resultSet = tableEnv.toDataSet(result, Row.class);
    List<Row> results = resultSet.collect();
    String expected = "1,1,Hi\n" + "2,2,Hello\n" + "3,2,Hello world\n" + "4,3,Hello world, how are you?\n" + "5,3,I am fine.\n" + "6,3,Luke Skywalker\n" + "7,4,Comment#1\n" + "8,4,Comment#2\n" + "9,4,Comment#3\n" + "10,4,Comment#4\n" + "11,5,Comment#5\n" + "12,5,Comment#6\n" + "13,5,Comment#7\n" + "14,5,Comment#8\n" + "15,5,Comment#9\n" + "16,6,Comment#10\n" + "17,6,Comment#11\n" + "18,6,Comment#12\n" + "19,6,Comment#13\n" + "20,6,Comment#14\n" + "21,6,Comment#15\n";
    compareResultAsText(results, expected);
}
Also used : ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) Table(org.apache.flink.table.api.Table) Tuple3(org.apache.flink.api.java.tuple.Tuple3) Row(org.apache.flink.types.Row) BatchTableEnvironment(org.apache.flink.table.api.java.BatchTableEnvironment) Test(org.junit.Test)
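
The CollectionDataSets helper above comes from Flink's test utilities. Purely for illustration (not part of the Flink test), the sketch below builds a small equivalent Tuple3 DataSet inline, using the first three rows of the expected output, and registers it the same way.

// Standalone sketch: registering an inline Tuple3 DataSet under field names a, b, c.
ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env);
DataSet<Tuple3<Integer, Long, String>> ds = env.fromElements(
        new Tuple3<>(1, 1L, "Hi"),
        new Tuple3<>(2, 2L, "Hello"),
        new Tuple3<>(3, 2L, "Hello world"));
// "a, b, c" assigns names to the tuple positions f0, f1, f2.
tableEnv.registerDataSet("MyTable", ds, "a, b, c");
Table result = tableEnv.scan("MyTable").select("a, b, c");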

Example 5 with BatchTableEnvironment

Use of org.apache.flink.table.api.java.BatchTableEnvironment in project flink by apache.

From the class TableEnvironmentITCase, method testAsFromAndToPojo:

@Test
public void testAsFromAndToPojo() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());
    List<SmallPojo> data = new ArrayList<>();
    data.add(new SmallPojo("Peter", 28, 4000.00, "Sales"));
    data.add(new SmallPojo("Anna", 56, 10000.00, "Engineering"));
    data.add(new SmallPojo("Lucy", 42, 6000.00, "HR"));
    Table table = tableEnv.fromDataSet(env.fromCollection(data), "department AS a, " + "age AS b, " + "salary AS c, " + "name AS d").select("a, b, c, d");
    DataSet<SmallPojo2> ds = tableEnv.toDataSet(table, SmallPojo2.class);
    List<SmallPojo2> results = ds.collect();
    String expected = "Sales,28,4000.0,Peter\n" + "Engineering,56,10000.0,Anna\n" + "HR,42,6000.0,Lucy\n";
    compareResultAsText(results, expected);
}
Also used : ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) Table(org.apache.flink.table.api.Table) ArrayList(java.util.ArrayList) BatchTableEnvironment(org.apache.flink.table.api.java.BatchTableEnvironment) Test(org.junit.Test)
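
SmallPojo2 is the output POJO: its public fields must be named a, b, c and d so that toDataSet can map the selected Table fields by name. A minimal sketch, assuming the field types implied by the expected output (the actual test class may differ in details such as toString()):

// Sketch of the SmallPojo2 output type assumed by the test above.
public static class SmallPojo2 {

    public String a;  // department
    public int b;     // age
    public double c;  // salary
    public String d;  // name

    public SmallPojo2() {}

    @Override
    public String toString() {
        return a + "," + b + "," + c + "," + d;
    }
}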

Aggregations

ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment): 38
BatchTableEnvironment (org.apache.flink.table.api.java.BatchTableEnvironment): 38
Test (org.junit.Test): 36
Table (org.apache.flink.table.api.Table): 30
Row (org.apache.flink.types.Row): 19
Tuple3 (org.apache.flink.api.java.tuple.Tuple3): 12
ArrayList (java.util.ArrayList): 6
TableConfig (org.apache.flink.table.api.TableConfig): 4
HTable (org.apache.hadoop.hbase.client.HTable): 4
Tuple5 (org.apache.flink.api.java.tuple.Tuple5): 2
BatchTableSource (org.apache.flink.table.sources.BatchTableSource): 2
HashMap (java.util.HashMap): 1
Tuple4 (org.apache.flink.api.java.tuple.Tuple4): 1
TupleTypeInfo (org.apache.flink.api.java.typeutils.TupleTypeInfo): 1
CalciteConfig (org.apache.flink.table.calcite.CalciteConfig): 1
CalciteConfigBuilder (org.apache.flink.table.calcite.CalciteConfigBuilder): 1