Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.
Class TpcdsTestProgram, method main.
public static void main(String[] args) throws Exception {
    ParameterTool params = ParameterTool.fromArgs(args);
    String sourceTablePath = params.getRequired("sourceTablePath");
    String queryPath = params.getRequired("queryPath");
    String sinkTablePath = params.getRequired("sinkTablePath");
    Boolean useTableStats = params.getBoolean("useTableStats");
    TableEnvironment tableEnvironment = prepareTableEnv(sourceTablePath, useTableStats);
    // execute TPC-DS queries
    for (String queryId : TPCDS_QUERIES) {
        System.out.println("[INFO]Run TPC-DS query " + queryId + " ...");
        String queryName = QUERY_PREFIX + queryId + QUERY_SUFFIX;
        String queryFilePath = queryPath + FILE_SEPARATOR + queryName;
        String queryString = loadFile2String(queryFilePath);
        Table resultTable = tableEnvironment.sqlQuery(queryString);
        // register sink table
        String sinkTableName = QUERY_PREFIX + queryId + "_sinkTable";
        ((TableEnvironmentInternal) tableEnvironment)
                .registerTableSinkInternal(
                        sinkTableName,
                        new CsvTableSink(
                                sinkTablePath + FILE_SEPARATOR + queryId + RESULT_SUFFIX,
                                COL_DELIMITER,
                                1,
                                FileSystem.WriteMode.OVERWRITE,
                                resultTable.getSchema().getFieldNames(),
                                resultTable.getSchema().getFieldDataTypes()));
        TableResult tableResult = resultTable.executeInsert(sinkTableName);
        // wait for the job to finish
        tableResult.getJobClient().get().getJobExecutionResult().get();
        System.out.println("[INFO]Run TPC-DS query " + queryId + " success.");
    }
}
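The snippet follows the standard batch flow: build a query with sqlQuery, point it at a registered sink with executeInsert, and block on the resulting job. Below is a minimal, self-contained sketch of that flow; the datagen and blackhole connectors and the table names are placeholders chosen for illustration, not part of the TPC-DS program.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;

public class QueryToSinkSketch {
    public static void main(String[] args) throws Exception {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        // Throwaway source and sink registered via DDL; names and schema are placeholders.
        tEnv.executeSql(
                "CREATE TABLE src (id INT, name STRING) WITH ("
                        + " 'connector' = 'datagen', 'number-of-rows' = '10')");
        tEnv.executeSql(
                "CREATE TABLE snk (id INT, name STRING) WITH ('connector' = 'blackhole')");
        // Same three steps as above: build the query, insert into the sink, wait for the job.
        Table result = tEnv.sqlQuery("SELECT id, name FROM src");
        TableResult tableResult = result.executeInsert("snk");
        // In batch mode this blocks until the job finishes, like the explicit
        // getJobClient().get().getJobExecutionResult().get() call in the snippet above.
        tableResult.await();
    }
}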
Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.
Class BatchSQLTestProgram, method main.
public static void main(String[] args) throws Exception {
    ParameterTool params = ParameterTool.fromArgs(args);
    String outputPath = params.getRequired("outputPath");
    String sqlStatement = params.getRequired("sqlStatement");
    TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
    ((TableEnvironmentInternal) tEnv)
            .registerTableSourceInternal("table1", new GeneratorTableSource(10, 100, 60, 0));
    ((TableEnvironmentInternal) tEnv)
            .registerTableSourceInternal("table2", new GeneratorTableSource(5, 0.2f, 60, 5));
    ((TableEnvironmentInternal) tEnv)
            .registerTableSinkInternal(
                    "sinkTable",
                    new CsvTableSink(outputPath)
                            .configure(
                                    new String[] { "f0", "f1" },
                                    new TypeInformation[] { Types.INT, Types.SQL_TIMESTAMP }));
    TableResult result = tEnv.executeSql(sqlStatement);
    // wait for the job to finish
    result.getJobClient().get().getJobExecutionResult().get();
}
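registerTableSourceInternal and registerTableSinkInternal are internal APIs; the same wiring can also be expressed with SQL DDL. The sketch below is a hedged equivalent using the datagen and filesystem connectors; the connector options are standard, but the schema, row count, and path handling are illustrative rather than taken from the test.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class BatchSqlDdlSketch {
    public static void main(String[] args) throws Exception {
        String outputPath = args[0];
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        // Bounded generator source in place of GeneratorTableSource (illustrative schema).
        tEnv.executeSql(
                "CREATE TABLE table1 (f0 INT, f1 STRING) WITH ("
                        + " 'connector' = 'datagen', 'number-of-rows' = '100')");
        // CSV sink on the filesystem in place of registerTableSinkInternal + CsvTableSink.
        tEnv.executeSql(
                "CREATE TABLE sinkTable (f0 INT, f1 STRING) WITH ("
                        + " 'connector' = 'filesystem',"
                        + " 'path' = '" + outputPath + "',"
                        + " 'format' = 'csv')");
        // executeSql submits the insert job; await() blocks until it finishes in batch mode.
        tEnv.executeSql("INSERT INTO sinkTable SELECT f0, f1 FROM table1").await();
    }
}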
Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.
Class HiveModuleTest, method testEmptyStringLiteralParameters.
@Test
public void testEmptyStringLiteralParameters() {
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode();
    tableEnv.unloadModule("core");
    tableEnv.loadModule("hive", new HiveModule());
    // UDF
    List<Row> results =
            CollectionUtil.iteratorToList(
                    tableEnv.sqlQuery("select regexp_replace('foobar','oo|ar','')")
                            .execute()
                            .collect());
    assertEquals("[fb]", results.toString());
    // GenericUDF
    results =
            CollectionUtil.iteratorToList(
                    tableEnv.sqlQuery("select length('')").execute().collect());
    assertEquals("[0]", results.toString());
}
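Unloading core before loading hive makes Hive's built-in functions the only ones visible. The sketch below keeps both modules and only changes the resolution order instead; it assumes the Hive connector dependencies are on the classpath and a Flink version that has useModules. HiveTestUtils is a test-only helper, so a plain batch TableEnvironment is used here.

import java.util.Arrays;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.module.hive.HiveModule;

public class HiveModuleOrderSketch {
    public static void main(String[] args) {
        TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        // Keep the core module, but list "hive" first so Hive functions win on name clashes.
        tableEnv.loadModule("hive", new HiveModule());
        tableEnv.useModules("hive", "core");
        // Expected to print the resolution order, e.g. [hive, core].
        System.out.println(Arrays.toString(tableEnv.listModules()));
    }
}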
Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.
Class HiveModuleTest, method testDecimalReturnType.
@Test
public void testDecimalReturnType() {
    TableEnvironment tEnv = HiveTestUtils.createTableEnvInBatchMode();
    tEnv.unloadModule("core");
    tEnv.loadModule("hive", new HiveModule());
    List<Row> results =
            CollectionUtil.iteratorToList(
                    tEnv.sqlQuery("select negative(5.1)").execute().collect());
    assertEquals("[-5.1]", results.toString());
}
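A sketch of how one might confirm the decimal type itself rather than the printed value: it assumes the same module setup, a Flink version where Table.getResolvedSchema() is available, and the expectation of DECIMAL(2, 1) is an assumption based on the literal 5.1, not something the test asserts.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.module.hive.HiveModule;

public class DecimalReturnTypeSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        tEnv.unloadModule("core");
        tEnv.loadModule("hive", new HiveModule());
        // Inspect the inferred type rather than the value: the literal 5.1 is DECIMAL(2, 1),
        // and Hive's negative() is assumed to keep that precision and scale.
        Table t = tEnv.sqlQuery("select negative(5.1)");
        System.out.println(t.getResolvedSchema());
    }
}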
Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.
Class JdbcDynamicTableSinkITCase, method testBatchSink.
@Test
public void testBatchSink() throws Exception {
    TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
    tEnv.executeSql(
            "CREATE TABLE USER_RESULT("
                    + "NAME VARCHAR,"
                    + "SCORE BIGINT"
                    + ") WITH ( "
                    + "'connector' = 'jdbc',"
                    + "'url'='" + DB_URL + "',"
                    + "'table-name' = '" + OUTPUT_TABLE3 + "',"
                    + "'sink.buffer-flush.max-rows' = '2',"
                    + "'sink.buffer-flush.interval' = '300ms',"
                    + "'sink.max-retries' = '4'"
                    + ")");
    TableResult tableResult =
            tEnv.executeSql(
                    "INSERT INTO USER_RESULT\n"
                            + "SELECT user_name, score "
                            + "FROM (VALUES (1, 'Bob'), (22, 'Tom'), (42, 'Kim'), "
                            + "(42, 'Kim'), (1, 'Bob')) "
                            + "AS UserCountTable(score, user_name)");
    tableResult.await();
    check(
            new Row[] {
                Row.of("Bob", 1), Row.of("Tom", 22), Row.of("Kim", 42),
                Row.of("Kim", 42), Row.of("Bob", 1)
            },
            DB_URL,
            OUTPUT_TABLE3,
            new String[] { "NAME", "SCORE" });
}
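check is a helper of the test class and is not shown here. Below is a hedged sketch of what such a verification step might look like with plain JDBC, reading the rows back from the sink table; DB_URL and OUTPUT_TABLE3 come from the test fixture, everything else is illustrative.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

public class JdbcResultCheckSketch {
    // Reads NAME/SCORE rows back from the sink table so they can be compared with the
    // expected Row[] above; dbUrl and table would be DB_URL and OUTPUT_TABLE3.
    static List<String> readRows(String dbUrl, String table) throws Exception {
        List<String> rows = new ArrayList<>();
        try (Connection conn = DriverManager.getConnection(dbUrl);
                Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT NAME, SCORE FROM " + table)) {
            while (rs.next()) {
                rows.add(rs.getString(1) + "," + rs.getLong(2));
            }
        }
        return rows;
    }
}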