
Example 1 with TableEnvironmentInternal

Use of org.apache.flink.table.api.internal.TableEnvironmentInternal in project flink by apache.

From the class HiveLookupJoinITCase, method getLookupFunction:

private FileSystemLookupFunction<HiveTablePartition> getLookupFunction(String tableName) throws Exception {
    TableEnvironmentInternal tableEnvInternal = (TableEnvironmentInternal) tableEnv;
    ObjectIdentifier tableIdentifier = ObjectIdentifier.of(hiveCatalog.getName(), "default", tableName);
    CatalogTable catalogTable = (CatalogTable) hiveCatalog.getTable(tableIdentifier.toObjectPath());
    // Build the table source through the catalog's factory; the internal
    // catalog manager resolves the plain CatalogTable for the factory.
    HiveLookupTableSource hiveTableSource = (HiveLookupTableSource) FactoryUtil.createDynamicTableSource(
            (DynamicTableSourceFactory) hiveCatalog.getFactory().orElseThrow(IllegalStateException::new),
            tableIdentifier,
            tableEnvInternal.getCatalogManager().resolveCatalogTable(catalogTable),
            tableEnv.getConfig().getConfiguration(),
            Thread.currentThread().getContextClassLoader(),
            false);
    // The lookup key is the first column (index 0).
    return (FileSystemLookupFunction<HiveTablePartition>) hiveTableSource.getLookupFunction(new int[][] { { 0 } });
}
Also used: DynamicTableSourceFactory(org.apache.flink.table.factories.DynamicTableSourceFactory) TableEnvironmentInternal(org.apache.flink.table.api.internal.TableEnvironmentInternal) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
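
The snippet relies on tableEnv and hiveCatalog fields initialized elsewhere in the test class. A minimal sketch of such a setup, assuming a batch environment (the catalog name and conf directory below are hypothetical placeholders):

// Hypothetical setup: create a batch TableEnvironment and register a HiveCatalog.
TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
HiveCatalog hiveCatalog = new HiveCatalog("hive", "default", "/path/to/hive-conf");
tableEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
tableEnv.useCatalog(hiveCatalog.getName());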

Example 2 with TableEnvironmentInternal

Use of org.apache.flink.table.api.internal.TableEnvironmentInternal in project flink by apache.

From the class HiveDialectITCase, method testParseCommand:

@Test
public void testParseCommand() {
    TableEnvironmentInternal tableEnvInternal = (TableEnvironmentInternal) tableEnv;
    Parser parser = tableEnvInternal.getParser();
    // hive dialect should use HiveParser
    assertTrue(parser instanceof HiveParser);
    assertThat(parser.parse("HELP").get(0), instanceOf(HelpOperation.class));
    assertThat(parser.parse("clear").get(0), instanceOf(ClearOperation.class));
    assertThat(parser.parse("SET").get(0), instanceOf(SetOperation.class));
    assertThat(parser.parse("ResET").get(0), instanceOf(ResetOperation.class));
    assertThat(parser.parse("Exit").get(0), instanceOf(QuitOperation.class));
}
Also used: ResetOperation(org.apache.flink.table.operations.command.ResetOperation) SetOperation(org.apache.flink.table.operations.command.SetOperation) TableEnvironmentInternal(org.apache.flink.table.api.internal.TableEnvironmentInternal) HiveParser(org.apache.flink.table.planner.delegation.hive.HiveParser) ClearOperation(org.apache.flink.table.operations.command.ClearOperation) Parser(org.apache.flink.table.delegation.Parser) HelpOperation(org.apache.flink.table.operations.command.HelpOperation) QuitOperation(org.apache.flink.table.operations.command.QuitOperation) Test(org.junit.Test)
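
These command statements are normally issued from the SQL client. A minimal sketch of dispatching on a parsed operation (the branching logic is illustrative, not taken from the test):

// Parse a client command and branch on the resulting Operation subtype.
Operation op = parser.parse("SET").get(0);
if (op instanceof SetOperation) {
    // A bare SET in the SQL client lists all session properties.
    System.out.println("received a SET command");
}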

Example 3 with TableEnvironmentInternal

Use of org.apache.flink.table.api.internal.TableEnvironmentInternal in project flink by apache.

From the class HiveDialectITCase, method testPluggableParser:

@Test
public void testPluggableParser() {
    TableEnvironmentInternal tableEnvInternal = (TableEnvironmentInternal) tableEnv;
    Parser parser = tableEnvInternal.getParser();
    // hive dialect should use HiveParser
    assertTrue(parser instanceof HiveParser);
    // execute some sql and verify the parser instance is reused
    tableEnvInternal.executeSql("show databases");
    assertSame(parser, tableEnvInternal.getParser());
    // switching dialect will result in a new parser
    tableEnvInternal.getConfig().setSqlDialect(SqlDialect.DEFAULT);
    assertNotEquals(parser.getClass().getName(), tableEnvInternal.getParser().getClass().getName());
}
Also used: TableEnvironmentInternal(org.apache.flink.table.api.internal.TableEnvironmentInternal) HiveParser(org.apache.flink.table.planner.delegation.hive.HiveParser) Parser(org.apache.flink.table.delegation.Parser) Test(org.junit.Test)
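
A plausible continuation, not part of the original test: switching back to the Hive dialect should produce a HiveParser again, since the parser is re-created on each dialect change.

// Hypothetical follow-up: restoring the Hive dialect yields a HiveParser once more.
tableEnvInternal.getConfig().setSqlDialect(SqlDialect.HIVE);
assertTrue(tableEnvInternal.getParser() instanceof HiveParser);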

Example 4 with TableEnvironmentInternal

Use of org.apache.flink.table.api.internal.TableEnvironmentInternal in project flink by apache.

From the class HiveDialectITCase, method testCreateTable:

@Test
public void testCreateTable() throws Exception {
    String location = warehouse + "/external_location";
    tableEnv.executeSql(String.format("create external table tbl1 (d decimal(10,0),ts timestamp) partitioned by (p string) location '%s' tblproperties('k1'='v1')", location));
    Table hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl1"));
    assertEquals(TableType.EXTERNAL_TABLE.toString(), hiveTable.getTableType());
    assertEquals(1, hiveTable.getPartitionKeysSize());
    assertEquals(location, locationPath(hiveTable.getSd().getLocation()));
    assertEquals("v1", hiveTable.getParameters().get("k1"));
    assertFalse(hiveTable.getParameters().containsKey(SqlCreateHiveTable.TABLE_LOCATION_URI));
    tableEnv.executeSql("create table tbl2 (s struct<ts:timestamp,bin:binary>) stored as orc");
    hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl2"));
    assertEquals(TableType.MANAGED_TABLE.toString(), hiveTable.getTableType());
    assertEquals(OrcSerde.class.getName(), hiveTable.getSd().getSerdeInfo().getSerializationLib());
    assertEquals(OrcInputFormat.class.getName(), hiveTable.getSd().getInputFormat());
    assertEquals(OrcOutputFormat.class.getName(), hiveTable.getSd().getOutputFormat());
    tableEnv.executeSql("create table tbl3 (m map<timestamp,binary>) partitioned by (p1 bigint,p2 tinyint) " + "row format serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe'");
    hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl3"));
    assertEquals(2, hiveTable.getPartitionKeysSize());
    assertEquals(LazyBinarySerDe.class.getName(), hiveTable.getSd().getSerdeInfo().getSerializationLib());
    tableEnv.executeSql("create table tbl4 (x int,y smallint) row format delimited fields terminated by '|' lines terminated by '\n'");
    hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl4"));
    assertEquals("|", hiveTable.getSd().getSerdeInfo().getParameters().get(serdeConstants.FIELD_DELIM));
    assertEquals("|", hiveTable.getSd().getSerdeInfo().getParameters().get(serdeConstants.SERIALIZATION_FORMAT));
    assertEquals("\n", hiveTable.getSd().getSerdeInfo().getParameters().get(serdeConstants.LINE_DELIM));
    tableEnv.executeSql("create table tbl5 (m map<bigint,string>) row format delimited collection items terminated by ';' " + "map keys terminated by ':'");
    hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl5"));
    assertEquals(";", hiveTable.getSd().getSerdeInfo().getParameters().get(serdeConstants.COLLECTION_DELIM));
    assertEquals(":", hiveTable.getSd().getSerdeInfo().getParameters().get(serdeConstants.MAPKEY_DELIM));
    int createdTimeForTableExists = hiveTable.getCreateTime();
    tableEnv.executeSql("create table if not exists tbl5 (m map<bigint,string>)");
    hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl5"));
    assertEquals(createdTimeForTableExists, hiveTable.getCreateTime());
    // test describe table
    Parser parser = ((TableEnvironmentInternal) tableEnv).getParser();
    DescribeTableOperation operation = (DescribeTableOperation) parser.parse("desc tbl1").get(0);
    assertFalse(operation.isExtended());
    assertEquals(ObjectIdentifier.of(hiveCatalog.getName(), "default", "tbl1"), operation.getSqlIdentifier());
    operation = (DescribeTableOperation) parser.parse("describe default.tbl2").get(0);
    assertFalse(operation.isExtended());
    assertEquals(ObjectIdentifier.of(hiveCatalog.getName(), "default", "tbl2"), operation.getSqlIdentifier());
    operation = (DescribeTableOperation) parser.parse("describe extended tbl3").get(0);
    assertTrue(operation.isExtended());
    assertEquals(ObjectIdentifier.of(hiveCatalog.getName(), "default", "tbl3"), operation.getSqlIdentifier());
}
Also used: ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogTable(org.apache.flink.table.catalog.CatalogTable) SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) Table(org.apache.hadoop.hive.metastore.api.Table) TableEnvironmentInternal(org.apache.flink.table.api.internal.TableEnvironmentInternal) OrcSerde(org.apache.hadoop.hive.ql.io.orc.OrcSerde) OrcInputFormat(org.apache.hadoop.hive.ql.io.orc.OrcInputFormat) LazyBinarySerDe(org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe) DescribeTableOperation(org.apache.flink.table.operations.DescribeTableOperation) OrcOutputFormat(org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat) HiveParser(org.apache.flink.table.planner.delegation.hive.HiveParser) Parser(org.apache.flink.table.delegation.Parser) Test(org.junit.Test)
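
Parsing a DESCRIBE statement only classifies it as a DescribeTableOperation; to actually run it, one can go through executeSql. A minimal sketch (not part of the test):

// Sketch: execute DESCRIBE and iterate the schema rows it returns.
TableResult result = tableEnv.executeSql("desc tbl1");
try (CloseableIterator<Row> rows = result.collect()) {
    rows.forEachRemaining(System.out::println);
}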

Example 5 with TableEnvironmentInternal

Use of org.apache.flink.table.api.internal.TableEnvironmentInternal in project flink by apache.

From the class HiveDynamicTableFactoryTest, method getTableSink:

private DynamicTableSink getTableSink(String tableName) throws Exception {
    TableEnvironmentInternal tableEnvInternal = (TableEnvironmentInternal) tableEnv;
    ObjectIdentifier tableIdentifier = ObjectIdentifier.of(hiveCatalog.getName(), "default", tableName);
    CatalogTable catalogTable = (CatalogTable) hiveCatalog.getTable(tableIdentifier.toObjectPath());
    // Sink-side counterpart of the lookup-source helper: build the sink via the catalog's factory.
    return FactoryUtil.createDynamicTableSink(
            (DynamicTableSinkFactory) hiveCatalog.getFactory().orElseThrow(IllegalStateException::new),
            tableIdentifier,
            tableEnvInternal.getCatalogManager().resolveCatalogTable(catalogTable),
            tableEnv.getConfig().getConfiguration(),
            Thread.currentThread().getContextClassLoader(), false);
}
Also used: TableEnvironmentInternal(org.apache.flink.table.api.internal.TableEnvironmentInternal) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
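
A caller would typically assert on the concrete sink class. A hypothetical usage, assuming the Hive connector returns its own HiveTableSink implementation (the table name is a placeholder):

DynamicTableSink sink = getTableSink("some_table");
// Expectation under the stated assumption, not verified by this snippet.
assertTrue(sink instanceof HiveTableSink);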

Aggregations

TableEnvironmentInternal (org.apache.flink.table.api.internal.TableEnvironmentInternal): 18
Parser (org.apache.flink.table.delegation.Parser): 7
Test (org.junit.Test): 5
TableEnvironment (org.apache.flink.table.api.TableEnvironment): 4
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 4
ExecutionContext (org.apache.flink.table.client.gateway.context.ExecutionContext): 4
IOException (java.io.IOException): 3
ParameterTool (org.apache.flink.api.java.utils.ParameterTool): 3
Table (org.apache.flink.table.api.Table): 3
TableException (org.apache.flink.table.api.TableException): 3
TableResult (org.apache.flink.table.api.TableResult): 3
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 3
HiveParser (org.apache.flink.table.planner.delegation.hive.HiveParser): 3
Row (org.apache.flink.types.Row): 3
ArrayList (java.util.ArrayList): 2
TypeInformation (org.apache.flink.api.common.typeinfo.TypeInformation): 2
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment): 2
StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment): 2
SqlExecutionException (org.apache.flink.table.client.gateway.SqlExecutionException): 2
CsvTableSink (org.apache.flink.table.sinks.CsvTableSink): 2