Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.
From class HiveCatalogITCase, method testViewSchema.
@Test
public void testViewSchema() throws Exception {
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.DEFAULT);
    tableEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    tableEnv.useCatalog(hiveCatalog.getName());
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.useDatabase("db1");
        tableEnv.executeSql(
                "create table src(x int,ts timestamp(3)) with ('connector'='datagen','number-of-rows'='10')");
        tableEnv.executeSql("create view v1 as select x,ts from src order by x limit 3");
        CatalogView catalogView = (CatalogView) hiveCatalog.getTable(new ObjectPath("db1", "v1"));
        Schema viewSchema = catalogView.getUnresolvedSchema();
        assertThat(viewSchema)
                .isEqualTo(
                        Schema.newBuilder()
                                .fromFields(
                                        new String[] {"x", "ts"},
                                        new AbstractDataType[] {DataTypes.INT(), DataTypes.TIMESTAMP(3)})
                                .build());
        List<Row> results =
                CollectionUtil.iteratorToList(tableEnv.executeSql("select x from v1").collect());
        assertThat(results).hasSize(3);
        tableEnv.executeSql(
                "create view v2 (v2_x,v2_ts) comment 'v2 comment' as select x,cast(ts as timestamp_ltz(3)) from v1");
        catalogView = (CatalogView) hiveCatalog.getTable(new ObjectPath("db1", "v2"));
        assertThat(catalogView.getUnresolvedSchema())
                .isEqualTo(
                        Schema.newBuilder()
                                .fromFields(
                                        new String[] {"v2_x", "v2_ts"},
                                        new AbstractDataType[] {DataTypes.INT(), DataTypes.TIMESTAMP_LTZ(3)})
                                .build());
        assertThat(catalogView.getComment()).isEqualTo("v2 comment");
        results = CollectionUtil.iteratorToList(tableEnv.executeSql("select * from v2").collect());
        assertThat(results).hasSize(3);
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
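For readers without the Hive test harness, the same datagen-plus-view pattern works against Flink's default in-memory catalog. The following standalone sketch (the class name and main method are mine; everything else is standard Table API) reproduces the first half of the test:

import java.util.List;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.CollectionUtil;

public class ViewOverDatagenSketch {
    public static void main(String[] args) {
        // Batch TableEnvironment with the built-in in-memory catalog; no Hive required.
        TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        // The bounded datagen source emits exactly 10 rows; the view keeps the 3 smallest x.
        tableEnv.executeSql(
                "create table src(x int, ts timestamp(3)) with ('connector'='datagen','number-of-rows'='10')");
        tableEnv.executeSql("create view v1 as select x, ts from src order by x limit 3");
        List<Row> rows =
                CollectionUtil.iteratorToList(tableEnv.executeSql("select x from v1").collect());
        System.out.println(rows.size()); // prints 3
    }
}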
Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.
From class HiveTestUtils, method createTableEnvWithHiveCatalog.
public static TableEnvironment createTableEnvWithHiveCatalog(HiveCatalog catalog) {
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode();
    tableEnv.registerCatalog(catalog.getName(), catalog);
    tableEnv.useCatalog(catalog.getName());
    return tableEnv;
}
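A hypothetical call site for this helper, assuming a HiveCatalog test fixture named hiveCatalog like the one testViewSchema uses:

TableEnvironment tableEnv = HiveTestUtils.createTableEnvWithHiveCatalog(hiveCatalog);
// The returned environment already has the catalog registered and selected,
// so SQL can address its databases and tables without qualification.
tableEnv.executeSql("show databases").print();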
Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.
From class HiveTestUtils, method createTableEnvInBatchMode.
public static TableEnvironment createTableEnvInBatchMode(SqlDialect dialect) {
    TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
    tableEnv.getConfig().getConfiguration().setInteger(TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM.key(), 1);
    tableEnv.getConfig().setSqlDialect(dialect);
    return tableEnv;
}
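The zero-argument createTableEnvInBatchMode() called in the two snippets above is not shown on this page; a plausible implementation, assuming it merely defaults the dialect, would be:

public static TableEnvironment createTableEnvInBatchMode() {
    // Assumed delegation to the variant above with Flink's default SQL dialect.
    return createTableEnvInBatchMode(SqlDialect.DEFAULT);
}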
Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.
From class HiveModuleTest, method testNumberOfBuiltinFunctions.
@Test
public void testNumberOfBuiltinFunctions() {
    String hiveVersion = HiveShimLoader.getHiveVersion();
    HiveModule hiveModule = new HiveModule(hiveVersion);
    verifyNumBuiltInFunctions(hiveVersion, hiveModule);
    // Creating functions shouldn't change the number of built-in functions.
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode();
    tableEnv.executeSql("create function myudf as 'org.apache.hadoop.hive.ql.udf.UDFPI'");
    tableEnv.executeSql("create function mygenericudf as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFAbs'");
    tableEnv.executeSql("create function myudaf as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMax'");
    tableEnv.executeSql("create function myudtf as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDTFExplode'");
    verifyNumBuiltInFunctions(hiveVersion, hiveModule);
    // Explicitly verify that HiveModule doesn't consider the created functions
    // to be built-in functions.
    assertFalse(hiveModule.getFunctionDefinition("myudf").isPresent());
    assertFalse(hiveModule.getFunctionDefinition("mygenericudf").isPresent());
    assertFalse(hiveModule.getFunctionDefinition("myudaf").isPresent());
    assertFalse(hiveModule.getFunctionDefinition("myudtf").isPresent());
}
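verifyNumBuiltInFunctions is a helper not shown on this page; the real test pins an exact function count per Hive version. A version-agnostic sketch of the invariant it enforces, using only the public Module API (the helper name and expectedCount parameter are mine):

private static void verifyBuiltInFunctionCount(int expectedCount, HiveModule hiveModule) {
    // HiveModule.listFunctions() reports only Hive's built-ins; functions created
    // with CREATE FUNCTION live in the catalog, not the module, so this count is
    // the same before and after the executeSql calls above.
    assertEquals(expectedCount, hiveModule.listFunctions().size());
}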
Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.
From class HiveModuleTest, method testConstantArguments.
@Test
public void testConstantArguments() {
    TableEnvironment tEnv = HiveTestUtils.createTableEnvInBatchMode();
    tEnv.unloadModule("core");
    tEnv.loadModule("hive", new HiveModule());
    List<Row> results =
            CollectionUtil.iteratorToList(tEnv.sqlQuery("select concat('an', 'bn')").execute().collect());
    assertEquals("[anbn]", results.toString());
    results =
            CollectionUtil.iteratorToList(
                    tEnv.sqlQuery("select concat('ab', cast('cdefghi' as varchar(5)))").execute().collect());
    assertEquals("[abcdefg]", results.toString());
    results =
            CollectionUtil.iteratorToList(
                    tEnv.sqlQuery("select concat('ab',cast(12.34 as decimal(10,5)))").execute().collect());
    assertEquals("[ab12.34]", results.toString());
    results =
            CollectionUtil.iteratorToList(
                    tEnv.sqlQuery("select concat(cast('2018-01-19' as date),cast('2019-12-27 17:58:23.385' as timestamp))")
                            .execute()
                            .collect());
    assertEquals("[2018-01-192019-12-27 17:58:23.385]", results.toString());
    // TODO: null cannot be a constant argument at the moment. This test will make more sense
    // when that changes.
    results =
            CollectionUtil.iteratorToList(
                    tEnv.sqlQuery("select concat('ab',cast(null as int))").execute().collect());
    assertEquals("[null]", results.toString());
}
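The unloadModule/loadModule pair at the top of the test is what makes concat resolve to Hive's implementation rather than Flink's built-in one. A minimal standalone sketch of that swap (the class name is mine; the module methods are standard TableEnvironment API):

import java.util.Arrays;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.module.hive.HiveModule;

public class HiveModuleSwapSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        tEnv.unloadModule("core"); // drop Flink's built-in function module
        tEnv.loadModule("hive", new HiveModule()); // resolve functions against Hive instead
        // Expected output: [hive] -- function lookup now goes through HiveModule only.
        System.out.println(Arrays.toString(tEnv.listModules()));
    }
}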