Use of org.apache.flink.table.api.internal.TableEnvironmentInternal in project flink by apache.
From the class HiveLookupJoinITCase, method getLookupFunction:
private FileSystemLookupFunction<HiveTablePartition> getLookupFunction(String tableName) throws Exception {
    TableEnvironmentInternal tableEnvInternal = (TableEnvironmentInternal) tableEnv;
    ObjectIdentifier tableIdentifier =
            ObjectIdentifier.of(hiveCatalog.getName(), "default", tableName);
    CatalogTable catalogTable =
            (CatalogTable) hiveCatalog.getTable(tableIdentifier.toObjectPath());
    // build the lookup source through the catalog's own table factory, resolving the
    // catalog table via the internal CatalogManager
    HiveLookupTableSource hiveTableSource =
            (HiveLookupTableSource)
                    FactoryUtil.createDynamicTableSource(
                            (DynamicTableSourceFactory)
                                    hiveCatalog.getFactory().orElseThrow(IllegalStateException::new),
                            tableIdentifier,
                            tableEnvInternal.getCatalogManager().resolveCatalogTable(catalogTable),
                            tableEnv.getConfig().getConfiguration(),
                            Thread.currentThread().getContextClassLoader(),
                            false);
    // ask the source for its lookup runtime function, keyed on the first column
    FileSystemLookupFunction<HiveTablePartition> lookupFunction =
            (FileSystemLookupFunction<HiveTablePartition>)
                    hiveTableSource.getLookupFunction(new int[][] {{0}});
    return lookupFunction;
}
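A minimal usage sketch of the helper above, in the spirit of the surrounding ITCase and using the same tableEnv/hiveCatalog fixture. The table name "bounded_table", the 'lookup.join.cache.ttl' property, and the expected interval are illustrative assumptions, not taken from this snippet:

// sketch only: assumes a table named "bounded_table" was registered earlier with
// the table property 'lookup.join.cache.ttl' = '5 min'
FileSystemLookupFunction<HiveTablePartition> lookupFunction = getLookupFunction("bounded_table");
// the lookup cache reload interval should reflect the configured TTL (java.time.Duration)
assertEquals(Duration.ofMinutes(5), lookupFunction.getReloadInterval());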
Use of org.apache.flink.table.api.internal.TableEnvironmentInternal in project flink by apache.
From the class HiveDialectITCase, method testParseCommand:
@Test
public void testParseCommand() {
    TableEnvironmentInternal tableEnvInternal = (TableEnvironmentInternal) tableEnv;
    Parser parser = tableEnvInternal.getParser();
    // hive dialect should use HiveParser
    assertTrue(parser instanceof HiveParser);
    assertThat(parser.parse("HELP").get(0), instanceOf(HelpOperation.class));
    assertThat(parser.parse("clear").get(0), instanceOf(ClearOperation.class));
    assertThat(parser.parse("SET").get(0), instanceOf(SetOperation.class));
    assertThat(parser.parse("ResET").get(0), instanceOf(ResetOperation.class));
    assertThat(parser.parse("Exit").get(0), instanceOf(QuitOperation.class));
}
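Beyond classifying statements, the Parser's output can be handed back to the internal environment for execution. A hedged sketch (the statement and the executeInternal round trip are illustrative and not part of the test above):

// sketch only: parse a statement into an Operation and execute it through the
// internal API instead of calling executeSql
Operation showDatabases = parser.parse("show databases").get(0);
TableResult result = tableEnvInternal.executeInternal(showDatabases);
result.print();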
Use of org.apache.flink.table.api.internal.TableEnvironmentInternal in project flink by apache.
From the class HiveDialectITCase, method testPluggableParser:
@Test
public void testPluggableParser() {
    TableEnvironmentInternal tableEnvInternal = (TableEnvironmentInternal) tableEnv;
    Parser parser = tableEnvInternal.getParser();
    // hive dialect should use HiveParser
    assertTrue(parser instanceof HiveParser);
    // execute some sql and verify the parser instance is reused
    tableEnvInternal.executeSql("show databases");
    assertSame(parser, tableEnvInternal.getParser());
    // switching dialect will result in a new parser
    tableEnvInternal.getConfig().setSqlDialect(SqlDialect.DEFAULT);
    assertNotEquals(
            parser.getClass().getName(),
            tableEnvInternal.getParser().getClass().getName());
}
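A hedged follow-up sketch, not part of the original test: switching the dialect back to HIVE should again produce a Hive-specific parser.

// sketch only: restoring the Hive dialect yields a HiveParser again
tableEnvInternal.getConfig().setSqlDialect(SqlDialect.HIVE);
assertTrue(tableEnvInternal.getParser() instanceof HiveParser);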
Use of org.apache.flink.table.api.internal.TableEnvironmentInternal in project flink by apache.
From the class HiveDialectITCase, method testCreateTable:
@Test
public void testCreateTable() throws Exception {
    // external partitioned table with an explicit location and table properties
    String location = warehouse + "/external_location";
    tableEnv.executeSql(
            String.format(
                    "create external table tbl1 (d decimal(10,0),ts timestamp) partitioned by (p string) "
                            + "location '%s' tblproperties('k1'='v1')",
                    location));
    Table hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl1"));
    assertEquals(TableType.EXTERNAL_TABLE.toString(), hiveTable.getTableType());
    assertEquals(1, hiveTable.getPartitionKeysSize());
    assertEquals(location, locationPath(hiveTable.getSd().getLocation()));
    assertEquals("v1", hiveTable.getParameters().get("k1"));
    assertFalse(hiveTable.getParameters().containsKey(SqlCreateHiveTable.TABLE_LOCATION_URI));
    // managed table stored as ORC
    tableEnv.executeSql("create table tbl2 (s struct<ts:timestamp,bin:binary>) stored as orc");
    hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl2"));
    assertEquals(TableType.MANAGED_TABLE.toString(), hiveTable.getTableType());
    assertEquals(OrcSerde.class.getName(), hiveTable.getSd().getSerdeInfo().getSerializationLib());
    assertEquals(OrcInputFormat.class.getName(), hiveTable.getSd().getInputFormat());
    assertEquals(OrcOutputFormat.class.getName(), hiveTable.getSd().getOutputFormat());
    // partitioned table with a custom serde
    tableEnv.executeSql(
            "create table tbl3 (m map<timestamp,binary>) partitioned by (p1 bigint,p2 tinyint) "
                    + "row format serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe'");
    hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl3"));
    assertEquals(2, hiveTable.getPartitionKeysSize());
    assertEquals(LazyBinarySerDe.class.getName(), hiveTable.getSd().getSerdeInfo().getSerializationLib());
    // delimited row format: field and line delimiters
    tableEnv.executeSql(
            "create table tbl4 (x int,y smallint) row format delimited "
                    + "fields terminated by '|' lines terminated by '\n'");
    hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl4"));
    assertEquals("|", hiveTable.getSd().getSerdeInfo().getParameters().get(serdeConstants.FIELD_DELIM));
    assertEquals("|", hiveTable.getSd().getSerdeInfo().getParameters().get(serdeConstants.SERIALIZATION_FORMAT));
    assertEquals("\n", hiveTable.getSd().getSerdeInfo().getParameters().get(serdeConstants.LINE_DELIM));
    // delimited row format: collection item and map key delimiters
    tableEnv.executeSql(
            "create table tbl5 (m map<bigint,string>) row format delimited collection items terminated by ';' "
                    + "map keys terminated by ':'");
    hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl5"));
    assertEquals(";", hiveTable.getSd().getSerdeInfo().getParameters().get(serdeConstants.COLLECTION_DELIM));
    assertEquals(":", hiveTable.getSd().getSerdeInfo().getParameters().get(serdeConstants.MAPKEY_DELIM));
    // "if not exists" must not replace an existing table
    int createdTimeForTableExists = hiveTable.getCreateTime();
    tableEnv.executeSql("create table if not exists tbl5 (m map<bigint,string>)");
    hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl5"));
    assertEquals(createdTimeForTableExists, hiveTable.getCreateTime());
    // test describe table
    Parser parser = ((TableEnvironmentInternal) tableEnv).getParser();
    DescribeTableOperation operation = (DescribeTableOperation) parser.parse("desc tbl1").get(0);
    assertFalse(operation.isExtended());
    assertEquals(ObjectIdentifier.of(hiveCatalog.getName(), "default", "tbl1"), operation.getSqlIdentifier());
    operation = (DescribeTableOperation) parser.parse("describe default.tbl2").get(0);
    assertFalse(operation.isExtended());
    assertEquals(ObjectIdentifier.of(hiveCatalog.getName(), "default", "tbl2"), operation.getSqlIdentifier());
    operation = (DescribeTableOperation) parser.parse("describe extended tbl3").get(0);
    assertTrue(operation.isExtended());
    assertEquals(ObjectIdentifier.of(hiveCatalog.getName(), "default", "tbl3"), operation.getSqlIdentifier());
}
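The describe statement can also be run end to end rather than only parsed. A minimal sketch against the tables created above (printing the rows is illustrative; the exact column layout of the result is Hive's):

// sketch only: execute DESCRIBE directly and iterate over the returned rows
try (CloseableIterator<Row> rows = tableEnv.executeSql("describe tbl1").collect()) {
    rows.forEachRemaining(System.out::println);
}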
Use of org.apache.flink.table.api.internal.TableEnvironmentInternal in project flink by apache.
From the class HiveDynamicTableFactoryTest, method getTableSink:
private DynamicTableSink getTableSink(String tableName) throws Exception {
    TableEnvironmentInternal tableEnvInternal = (TableEnvironmentInternal) tableEnv;
    ObjectIdentifier tableIdentifier =
            ObjectIdentifier.of(hiveCatalog.getName(), "default", tableName);
    CatalogTable catalogTable =
            (CatalogTable) hiveCatalog.getTable(tableIdentifier.toObjectPath());
    // build the sink through the catalog's own table factory, resolving the
    // catalog table via the internal CatalogManager
    return FactoryUtil.createDynamicTableSink(
            (DynamicTableSinkFactory) hiveCatalog.getFactory().orElseThrow(IllegalStateException::new),
            tableIdentifier,
            tableEnvInternal.getCatalogManager().resolveCatalogTable(catalogTable),
            tableEnv.getConfig().getConfiguration(),
            Thread.currentThread().getContextClassLoader(),
            false);
}
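A hedged sketch of how such a helper is typically asserted on. The table name is a placeholder, and HiveTableSink is the concrete sink class expected for a table backed by the Hive catalog:

// sketch only: "sink_table" stands in for a table created earlier in the test
DynamicTableSink tableSink = getTableSink("sink_table");
assertTrue(tableSink instanceof HiveTableSink);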