
Example 41 with TableEnvironment

use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

the class TpcdsTestProgram method main.

public static void main(String[] args) throws Exception {
    ParameterTool params = ParameterTool.fromArgs(args);
    String sourceTablePath = params.getRequired("sourceTablePath");
    String queryPath = params.getRequired("queryPath");
    String sinkTablePath = params.getRequired("sinkTablePath");
    Boolean useTableStats = params.getBoolean("useTableStats");
    TableEnvironment tableEnvironment = prepareTableEnv(sourceTablePath, useTableStats);
    // execute TPC-DS queries
    for (String queryId : TPCDS_QUERIES) {
        System.out.println("[INFO]Run TPC-DS query " + queryId + " ...");
        String queryName = QUERY_PREFIX + queryId + QUERY_SUFFIX;
        String queryFilePath = queryPath + FILE_SEPARATOR + queryName;
        String queryString = loadFile2String(queryFilePath);
        Table resultTable = tableEnvironment.sqlQuery(queryString);
        // register sink table
        String sinkTableName = QUERY_PREFIX + queryId + "_sinkTable";
        ((TableEnvironmentInternal) tableEnvironment)
                .registerTableSinkInternal(
                        sinkTableName,
                        new CsvTableSink(
                                sinkTablePath + FILE_SEPARATOR + queryId + RESULT_SUFFIX,
                                COL_DELIMITER,
                                1,
                                FileSystem.WriteMode.OVERWRITE,
                                resultTable.getSchema().getFieldNames(),
                                resultTable.getSchema().getFieldDataTypes()));
        TableResult tableResult = resultTable.executeInsert(sinkTableName);
        // wait for the job to finish
        tableResult.getJobClient().get().getJobExecutionResult().get();
        System.out.println("[INFO]Run TPC-DS query " + queryId + " success.");
    }
}
Also used : ParameterTool(org.apache.flink.api.java.utils.ParameterTool) ConnectorCatalogTable(org.apache.flink.table.catalog.ConnectorCatalogTable) Table(org.apache.flink.table.api.Table) TableEnvironmentInternal(org.apache.flink.table.api.internal.TableEnvironmentInternal) TableResult(org.apache.flink.table.api.TableResult) CsvTableSink(org.apache.flink.table.sinks.CsvTableSink) TableEnvironment(org.apache.flink.table.api.TableEnvironment)
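
The registerTableSinkInternal call above goes through Flink's internal API. Where the internal hook is not required, a comparable CSV sink can usually be declared with DDL on the filesystem connector instead. The following is a minimal, stand-alone sketch of that approach, not the project's code: the table name, path, and two-column schema are made up for illustration, and it assumes the filesystem connector and the csv format are on the classpath.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;

public class CsvSinkViaDdlSketch {

    public static void main(String[] args) throws Exception {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());

        // Hypothetical sink; a real TPC-DS result table would mirror the
        // query's actual column names and types.
        tEnv.executeSql(
                "CREATE TABLE query1_sinkTable ("
                        + "  col_a STRING,"
                        + "  col_b BIGINT"
                        + ") WITH ("
                        + "  'connector' = 'filesystem',"
                        + "  'path' = '/tmp/tpcds-result/query1',"
                        + "  'format' = 'csv'"
                        + ")");

        // Write a stand-in result into the declared sink and wait for the job.
        TableResult result =
                tEnv.executeSql("INSERT INTO query1_sinkTable VALUES ('a', 1), ('b', 2)");
        result.await();
    }
}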

Example 42 with TableEnvironment

use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

the class BatchSQLTestProgram method main.

public static void main(String[] args) throws Exception {
    ParameterTool params = ParameterTool.fromArgs(args);
    String outputPath = params.getRequired("outputPath");
    String sqlStatement = params.getRequired("sqlStatement");
    TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
    ((TableEnvironmentInternal) tEnv).registerTableSourceInternal("table1", new GeneratorTableSource(10, 100, 60, 0));
    ((TableEnvironmentInternal) tEnv).registerTableSourceInternal("table2", new GeneratorTableSource(5, 0.2f, 60, 5));
    ((TableEnvironmentInternal) tEnv)
            .registerTableSinkInternal(
                    "sinkTable",
                    new CsvTableSink(outputPath)
                            .configure(
                                    new String[] { "f0", "f1" },
                                    new TypeInformation[] { Types.INT, Types.SQL_TIMESTAMP }));
    TableResult result = tEnv.executeSql(sqlStatement);
    // wait for the job to finish
    result.getJobClient().get().getJobExecutionResult().get();
}
Also used : ParameterTool(org.apache.flink.api.java.utils.ParameterTool) TableEnvironmentInternal(org.apache.flink.table.api.internal.TableEnvironmentInternal) TableResult(org.apache.flink.table.api.TableResult) CsvTableSink(org.apache.flink.table.sinks.CsvTableSink) TableEnvironment(org.apache.flink.table.api.TableEnvironment) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation)
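
The explicit getJobClient().get().getJobExecutionResult().get() chain is one way to block until the job finishes; TableResult.await() does the same thing more compactly. Below is a minimal sketch, with a hypothetical blackhole sink used only so there is a job to wait on (the blackhole connector ships with standard Flink distributions).

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;

public class AwaitResultSketch {

    public static void main(String[] args) throws Exception {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());

        // Hypothetical sink that discards its input.
        tEnv.executeSql("CREATE TABLE sinkTable (f0 INT) WITH ('connector' = 'blackhole')");

        TableResult result = tEnv.executeSql("INSERT INTO sinkTable VALUES (1), (2), (3)");

        // Blocks until the batch job finishes, like the JobClient round-trip above.
        result.await();
    }
}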

Example 43 with TableEnvironment

use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

the class HiveModuleTest method testEmptyStringLiteralParameters.

@Test
public void testEmptyStringLiteralParameters() {
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode();
    tableEnv.unloadModule("core");
    tableEnv.loadModule("hive", new HiveModule());
    // UDF
    List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select regexp_replace('foobar','oo|ar','')").execute().collect());
    assertEquals("[fb]", results.toString());
    // GenericUDF
    results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select length('')").execute().collect());
    assertEquals("[0]", results.toString());
}
Also used : TableEnvironment(org.apache.flink.table.api.TableEnvironment) Row(org.apache.flink.types.Row) Test(org.junit.Test)
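
The test replaces the core module with the Hive module before running any SQL, so function lookup goes through Hive's function catalog. listModules() can be used to confirm which modules are active after such a swap. Below is a minimal sketch, assuming flink-connector-hive and a matching Hive dependency are on the classpath; it uses a plain batch TableEnvironment rather than the HiveTestUtils helper.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.module.hive.HiveModule;

public class HiveModuleLoadingSketch {

    public static void main(String[] args) {
        TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());

        // Swap the built-in functions for Hive's; functions are resolved
        // against the loaded modules in order.
        tableEnv.unloadModule("core");
        tableEnv.loadModule("hive", new HiveModule());

        // Should print only "hive" after the swap above.
        for (String module : tableEnv.listModules()) {
            System.out.println(module);
        }
    }
}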

Example 44 with TableEnvironment

use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

the class HiveModuleTest method testDecimalReturnType.

@Test
public void testDecimalReturnType() {
    TableEnvironment tEnv = HiveTestUtils.createTableEnvInBatchMode();
    tEnv.unloadModule("core");
    tEnv.loadModule("hive", new HiveModule());
    List<Row> results = CollectionUtil.iteratorToList(tEnv.sqlQuery("select negative(5.1)").execute().collect());
    assertEquals("[-5.1]", results.toString());
}
Also used : TableEnvironment(org.apache.flink.table.api.TableEnvironment) Row(org.apache.flink.types.Row) Test(org.junit.Test)
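
Both Hive module tests materialize results with CollectionUtil.iteratorToList. When collecting directly, the iterator returned by TableResult.collect() is a CloseableIterator, so try-with-resources is the safer pattern. A minimal sketch follows; it uses a plain SQL literal instead of Hive's negative() function, since it assumes only the built-in core module is loaded.

import java.util.ArrayList;
import java.util.List;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;

public class CollectRowsSketch {

    public static void main(String[] args) throws Exception {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());

        List<Row> rows = new ArrayList<>();
        // collect() returns a CloseableIterator; closing it releases the
        // resources of the underlying job.
        try (CloseableIterator<Row> it = tEnv.sqlQuery("SELECT -5.1").execute().collect()) {
            it.forEachRemaining(rows::add);
        }
        System.out.println(rows); // expected to print something like [-5.1]
    }
}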

Example 45 with TableEnvironment

use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

the class JdbcDynamicTableSinkITCase method testBatchSink.

@Test
public void testBatchSink() throws Exception {
    TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
    tEnv.executeSql("CREATE TABLE USER_RESULT(" + "NAME VARCHAR," + "SCORE BIGINT" + ") WITH ( " + "'connector' = 'jdbc'," + "'url'='" + DB_URL + "'," + "'table-name' = '" + OUTPUT_TABLE3 + "'," + "'sink.buffer-flush.max-rows' = '2'," + "'sink.buffer-flush.interval' = '300ms'," + "'sink.max-retries' = '4'" + ")");
    TableResult tableResult = tEnv.executeSql("INSERT INTO USER_RESULT\n" + "SELECT user_name, score " + "FROM (VALUES (1, 'Bob'), (22, 'Tom'), (42, 'Kim'), " + "(42, 'Kim'), (1, 'Bob')) " + "AS UserCountTable(score, user_name)");
    tableResult.await();
    check(new Row[] { Row.of("Bob", 1), Row.of("Tom", 22), Row.of("Kim", 42), Row.of("Kim", 42), Row.of("Bob", 1) }, DB_URL, OUTPUT_TABLE3, new String[] { "NAME", "SCORE" });
}
Also used : TableResult(org.apache.flink.table.api.TableResult) StreamTableEnvironment(org.apache.flink.table.api.bridge.java.StreamTableEnvironment) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Test(org.junit.Test)
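
The CREATE TABLE statement in the test is assembled from many short string literals, which makes the connector options hard to scan. One alternative is to keep the DDL in a single template and fill in the variable parts with String.format. The sketch below mirrors the same options but is illustrative only: the Derby URL and table name are hypothetical, and it assumes flink-connector-jdbc and the Derby driver are on the classpath and that the target table already exists in the database (Flink's CREATE TABLE only declares metadata).

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class JdbcSinkDdlSketch {

    public static void main(String[] args) throws Exception {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());

        // Hypothetical connection settings; substitute your own.
        String dbUrl = "jdbc:derby:memory:upsert;create=true";
        String outputTable = "USER_RESULT";

        String ddl = String.format(
                "CREATE TABLE USER_RESULT ("
                        + "  NAME VARCHAR,"
                        + "  SCORE BIGINT"
                        + ") WITH ("
                        + "  'connector' = 'jdbc',"
                        + "  'url' = '%s',"
                        + "  'table-name' = '%s',"
                        + "  'sink.buffer-flush.max-rows' = '2',"
                        + "  'sink.buffer-flush.interval' = '300ms',"
                        + "  'sink.max-retries' = '4'"
                        + ")",
                dbUrl, outputTable);
        tEnv.executeSql(ddl);

        tEnv.executeSql(
                "INSERT INTO USER_RESULT "
                        + "SELECT user_name, score "
                        + "FROM (VALUES (1, 'Bob'), (22, 'Tom')) AS T(score, user_name)")
                .await();
    }
}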

Aggregations

TableEnvironment (org.apache.flink.table.api.TableEnvironment) 137
Test (org.junit.Test) 95
Row (org.apache.flink.types.Row) 58
StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment) 38
Table (org.apache.flink.table.api.Table) 27
ObjectPath (org.apache.flink.table.catalog.ObjectPath) 19
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) 14
ArrayList (java.util.ArrayList) 13
CatalogTable (org.apache.flink.table.catalog.CatalogTable) 12
HashMap (java.util.HashMap) 11
EnvironmentSettings (org.apache.flink.table.api.EnvironmentSettings) 10
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable) 10
TableResult (org.apache.flink.table.api.TableResult) 8
File (java.io.File) 7
Constructor (java.lang.reflect.Constructor) 7
TableImpl (org.apache.flink.table.api.internal.TableImpl) 7
TableException (org.apache.flink.table.api.TableException) 5
List (java.util.List) 4
Configuration (org.apache.flink.configuration.Configuration) 4
TableSchema (org.apache.flink.table.api.TableSchema) 4