
Example 6 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in project flink by apache.

The class HiveCatalogTest, method testCreateGenericTable.

@Test
public void testCreateGenericTable() {
    // No 'connector' option is set, so HiveCatalog treats the table as a generic Flink table.
    Table hiveTable =
            HiveTableUtil.instantiateHiveTable(
                    new ObjectPath("test", "test"),
                    new CatalogTableImpl(schema, getLegacyFileSystemConnectorOptions("/test_path"), null),
                    HiveTestUtils.createHiveConf(),
                    false);
    Map<String, String> prop = hiveTable.getParameters();
    assertThat(HiveCatalog.isHiveTable(prop)).isFalse();
    // Every property of a generic table is namespaced under the Flink prefix.
    assertThat(prop.keySet()).allMatch(k -> k.startsWith(CatalogPropertiesUtil.FLINK_PROPERTY_PREFIX));
}
Also used: ObjectPath (org.apache.flink.table.catalog.ObjectPath), CatalogTable (org.apache.flink.table.catalog.CatalogTable), SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable), CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), Table (org.apache.hadoop.hive.metastore.api.Table), CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl), Test (org.junit.Test)
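
For context, ObjectPath itself is just a value class pairing a database name with an object (table, view, or function) name. A minimal, self-contained sketch of the parts of its API these examples rely on:

import org.apache.flink.table.catalog.ObjectPath;

public class ObjectPathDemo {
    public static void main(String[] args) {
        ObjectPath path = new ObjectPath("test", "test");

        // Accessors used throughout the examples on this page.
        System.out.println(path.getDatabaseName()); // test
        System.out.println(path.getObjectName());   // test
        System.out.println(path.getFullName());     // test.test

        // fromString parses a "db.object" string back into an ObjectPath.
        ObjectPath parsed = ObjectPath.fromString("test.test");
        System.out.println(parsed.equals(path));    // true
    }
}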

Example 7 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in project flink by apache.

The class HiveCatalogTest, method testCreateHiveTable.

@Test
public void testCreateHiveTable() {
    Map<String, String> options = getLegacyFileSystemConnectorOptions("/test_path");
    // Setting 'connector' = 'hive' marks the table as a Hive-native table.
    options.put(FactoryUtil.CONNECTOR.key(), SqlCreateHiveTable.IDENTIFIER);
    Table hiveTable =
            HiveTableUtil.instantiateHiveTable(
                    new ObjectPath("test", "test"),
                    new CatalogTableImpl(schema, options, null),
                    HiveTestUtils.createHiveConf(),
                    false);
    Map<String, String> prop = hiveTable.getParameters();
    assertThat(HiveCatalog.isHiveTable(prop)).isTrue();
    // Hive-native tables are stored with plain parameters, without the Flink prefix.
    assertThat(prop.keySet()).noneMatch(k -> k.startsWith(CatalogPropertiesUtil.FLINK_PROPERTY_PREFIX));
}
Also used: ObjectPath (org.apache.flink.table.catalog.ObjectPath), CatalogTable (org.apache.flink.table.catalog.CatalogTable), SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable), CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), Table (org.apache.hadoop.hive.metastore.api.Table), CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl), Test (org.junit.Test)
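
Together with Example 6, this shows the switch between the two storage modes: without the 'connector' = 'hive' option the table is persisted as a generic Flink table whose properties are all namespaced with CatalogPropertiesUtil.FLINK_PROPERTY_PREFIX; with it, the properties become plain Hive table parameters. A plain-Java sketch of the prefix check behind the allMatch/noneMatch assertions; note the literal "flink." value is an assumption about the prefix constant, not something stated in the examples above:

import java.util.HashMap;
import java.util.Map;

public class PropertyPrefixDemo {
    // Assumed value of CatalogPropertiesUtil.FLINK_PROPERTY_PREFIX.
    private static final String FLINK_PROPERTY_PREFIX = "flink.";

    public static void main(String[] args) {
        Map<String, String> genericTableProps = new HashMap<>();
        genericTableProps.put("flink.connector", "filesystem");
        genericTableProps.put("flink.path", "/test_path");

        Map<String, String> hiveTableProps = new HashMap<>();
        hiveTableProps.put("path", "/test_path");

        // allPrefixed corresponds to the allMatch assertion in Example 6;
        // a noneMatch check would be the Example 7 counterpart.
        System.out.println(allPrefixed(genericTableProps)); // true
        System.out.println(allPrefixed(hiveTableProps));    // false
    }

    private static boolean allPrefixed(Map<String, String> props) {
        return props.keySet().stream().allMatch(k -> k.startsWith(FLINK_PROPERTY_PREFIX));
    }
}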

Example 8 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in project flink by apache.

The class HiveRunnerITCase, method testStaticPartition.

@Test
public void testStaticPartition() throws Exception {
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.src (x int)");
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "src")
                .addRow(new Object[] { 1 })
                .addRow(new Object[] { 2 })
                .commit();
        tableEnv.executeSql("create table db1.dest (x int) partitioned by (p1 string, p2 double)");
        // Static partition spec: both partition columns are fixed, so all rows land in one
        // partition; the Java-escaped p1 value is the SQL literal 1'1.
        tableEnv.executeSql("insert into db1.dest partition (p1='1\\'1', p2=1.1) select x from db1.src").await();
        assertEquals(1, hiveCatalog.listPartitions(new ObjectPath("db1", "dest")).size());
        verifyHiveQueryResult("select * from db1.dest", Arrays.asList("1\t1'1\t1.1", "2\t1'1\t1.1"));
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used: ObjectPath (org.apache.flink.table.catalog.ObjectPath), TableEnvironment (org.apache.flink.table.api.TableEnvironment), Test (org.junit.Test)
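
Because every partition column is fixed in the PARTITION clause, this is a static partition insert: all selected rows go to one partition, and the SELECT list supplies only the non-partition columns. A sketch of the bare pattern, assuming a TableEnvironment already backed by a Hive catalog as in the test; the table names and partition values are illustrative:

import org.apache.flink.table.api.TableEnvironment;

public class StaticPartitionDemo {
    // Sketch only: tableEnv is assumed to be backed by a Hive catalog, as produced
    // by getTableEnvWithHiveCatalog() in the test above.
    static void insertIntoStaticPartition(TableEnvironment tableEnv) throws Exception {
        // Both partition columns are fixed in the PARTITION spec, so every row from
        // db1.src lands in the single partition (p1='v1', p2=1.1), and the SELECT
        // list supplies only the non-partition column x.
        tableEnv.executeSql("insert into db1.dest partition (p1='v1', p2=1.1) select x from db1.src")
                .await();
    }
}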

Example 9 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in project flink by apache.

The class HiveSinkCompactionITCase, method tearDown.

@After
public void tearDown() throws TableNotExistException {
    if (hiveCatalog != null) {
        // ignoreIfNotExists = true: the teardown stays idempotent when sink_table was never created.
        hiveCatalog.dropTable(new ObjectPath(tEnv().getCurrentDatabase(), "sink_table"), true);
        hiveCatalog.close();
    }
}
Also used: ObjectPath (org.apache.flink.table.catalog.ObjectPath), After (org.junit.After)
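
The boolean argument to dropTable is ignoreIfNotExists; passing true keeps the teardown from failing when the table is missing. The contract is the same for any Catalog implementation; a minimal runnable sketch against GenericInMemoryCatalog, which needs no external services:

import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;
import org.apache.flink.table.catalog.ObjectPath;

public class DropTableDemo {
    public static void main(String[] args) throws Exception {
        Catalog catalog = new GenericInMemoryCatalog("mem", "default");
        catalog.open();

        // ignoreIfNotExists = true: dropping a missing table is a no-op;
        // with false, this line would throw TableNotExistException.
        catalog.dropTable(new ObjectPath("default", "sink_table"), true);

        catalog.close();
    }
}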

Example 10 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in project flink by apache.

The class HiveSourceITCase, method testRegularRead.

@Test
public void testRegularRead() throws Exception {
    // Test a non-partitioned table first.
    ObjectPath tablePath = new ObjectPath("default", "tbl1");
    Map<String, String> tableOptions = new HashMap<>();
    tableOptions.put(CONNECTOR.key(), IDENTIFIER);
    hiveCatalog.createTable(tablePath,
            new CatalogTableImpl(TableSchema.builder().field("i", DataTypes.INT()).build(), tableOptions, null),
            false);
    HiveTestUtils.createTextTableInserter(hiveCatalog, tablePath.getDatabaseName(), tablePath.getObjectName())
            .addRow(new Object[] { 1 }).addRow(new Object[] { 2 }).commit();
    StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    streamEnv.setParallelism(1);
    // Plain bounded read of the whole table.
    HiveSource<RowData> hiveSource =
            new HiveSourceBuilder(new JobConf(hiveCatalog.getHiveConf()), new Configuration(),
                            HiveShimLoader.getHiveVersion(), tablePath.getDatabaseName(),
                            tablePath.getObjectName(), Collections.emptyMap())
                    .buildWithDefaultBulkFormat();
    List<RowData> results = CollectionUtil.iteratorToList(
            streamEnv.fromSource(hiveSource, WatermarkStrategy.noWatermarks(), "HiveSource-tbl1").executeAndCollect());
    assertEquals(2, results.size());
    assertEquals(1, results.get(0).getInt(0));
    assertEquals(2, results.get(1).getInt(0));
    hiveCatalog.dropTable(tablePath, false);
    // Test a partitioned table, first with a row limit.
    tablePath = new ObjectPath("default", "tbl2");
    hiveCatalog.createTable(tablePath,
            new CatalogTableImpl(
                    TableSchema.builder().field("i", DataTypes.INT()).field("p", DataTypes.STRING()).build(),
                    Collections.singletonList("p"), tableOptions, null),
            false);
    HiveTestUtils.createTextTableInserter(hiveCatalog, tablePath.getDatabaseName(), tablePath.getObjectName())
            .addRow(new Object[] { 1 }).addRow(new Object[] { 2 }).commit("p='a'");
    // setLimit(1) caps the read at a single row.
    hiveSource =
            new HiveSourceBuilder(new JobConf(hiveCatalog.getHiveConf()), new Configuration(),
                            HiveShimLoader.getHiveVersion(), tablePath.getDatabaseName(),
                            tablePath.getObjectName(), Collections.emptyMap())
                    .setLimit(1L)
                    .buildWithDefaultBulkFormat();
    results = CollectionUtil.iteratorToList(
            streamEnv.fromSource(hiveSource, WatermarkStrategy.noWatermarks(), "HiveSource-tbl2").executeAndCollect());
    assertEquals(1, results.size());
    assertEquals(1, results.get(0).getInt(0));
    assertEquals("a", results.get(0).getString(1).toString());
    HiveTestUtils.createTextTableInserter(hiveCatalog, tablePath.getDatabaseName(), tablePath.getObjectName())
            .addRow(new Object[] { 3 }).commit("p='b'");
    // Finally, pin the read to the explicit partition p='b' via setPartitions.
    LinkedHashMap<String, String> spec = new LinkedHashMap<>();
    spec.put("p", "b");
    hiveSource =
            new HiveSourceBuilder(new JobConf(hiveCatalog.getHiveConf()), new Configuration(), null,
                            tablePath.getDatabaseName(), tablePath.getObjectName(), Collections.emptyMap())
                    .setPartitions(Collections.singletonList(HiveTablePartition.ofPartition(
                            hiveCatalog.getHiveConf(), hiveCatalog.getHiveVersion(),
                            tablePath.getDatabaseName(), tablePath.getObjectName(), spec)))
                    .buildWithDefaultBulkFormat();
    results = CollectionUtil.iteratorToList(
            streamEnv.fromSource(hiveSource, WatermarkStrategy.noWatermarks(), "HiveSource-tbl2").executeAndCollect());
    assertEquals(1, results.size());
    assertEquals(3, results.get(0).getInt(0));
    assertEquals("b", results.get(0).getString(1).toString());
    hiveCatalog.dropTable(tablePath, false);
}
Also used: ObjectPath (org.apache.flink.table.catalog.ObjectPath), Configuration (org.apache.flink.configuration.Configuration), HashMap (java.util.HashMap), LinkedHashMap (java.util.LinkedHashMap), RowData (org.apache.flink.table.data.RowData), CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl), StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment), JobConf (org.apache.hadoop.mapred.JobConf), Test (org.junit.Test)
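
Stripped of the test plumbing, the example builds the same HiveSource three ways: a plain bounded read, a read capped with setLimit, and a read pinned to one partition with setPartitions. A sketch of the common skeleton, assuming only a HiveConf pointing at a reachable metastore; the class and method names here are illustrative, and the package paths are what the flink-connector-hive module uses:

import java.util.Collections;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connectors.hive.HiveSource;
import org.apache.flink.connectors.hive.HiveSourceBuilder;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.catalog.hive.client.HiveShimLoader;
import org.apache.flink.table.data.RowData;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.mapred.JobConf;

public class HiveSourceSketch {
    // Sketch only: hiveConf must point at a reachable Hive metastore.
    static DataStreamSource<RowData> readTable(StreamExecutionEnvironment streamEnv, HiveConf hiveConf) {
        HiveSource<RowData> source =
                new HiveSourceBuilder(new JobConf(hiveConf), new Configuration(),
                                // null is also accepted here; the builder then resolves
                                // the Hive version itself, as the third read above does.
                                HiveShimLoader.getHiveVersion(),
                                "default", "tbl1", Collections.emptyMap())
                        // Optional knobs from the test: .setLimit(1L) or .setPartitions(...)
                        .buildWithDefaultBulkFormat();
        return streamEnv.fromSource(source, WatermarkStrategy.noWatermarks(), "HiveSource-tbl1");
    }
}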

Aggregations

ObjectPath (org.apache.flink.table.catalog.ObjectPath): 81
Test (org.junit.Test): 52
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 32
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 29
HashMap (java.util.HashMap): 21
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 20
TableSchema (org.apache.flink.table.api.TableSchema): 19
TableEnvironment (org.apache.flink.table.api.TableEnvironment): 17
CatalogPartitionSpec (org.apache.flink.table.catalog.CatalogPartitionSpec): 12
Table (org.apache.hadoop.hive.metastore.api.Table): 12
Configuration (org.apache.flink.configuration.Configuration): 11
SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable): 11
TableNotExistException (org.apache.flink.table.catalog.exceptions.TableNotExistException): 9
ArrayList (java.util.ArrayList): 8
Map (java.util.Map): 8
GenericInMemoryCatalog (org.apache.flink.table.catalog.GenericInMemoryCatalog): 8
LinkedHashMap (java.util.LinkedHashMap): 7
Catalog (org.apache.flink.table.catalog.Catalog): 7
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 6
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 6