Example 11 with CatalogTableImpl

Use of org.apache.flink.table.catalog.CatalogTableImpl in project flink by apache.

From class HiveCatalogTest, method testCreateHiveTable.

@Test
public void testCreateHiveTable() {
    Map<String, String> options = getLegacyFileSystemConnectorOptions("/test_path");
    options.put(FactoryUtil.CONNECTOR.key(), SqlCreateHiveTable.IDENTIFIER);
    // Convert the Flink catalog table definition into a Hive metastore Table object.
    Table hiveTable = HiveTableUtil.instantiateHiveTable(
            new ObjectPath("test", "test"), new CatalogTableImpl(schema, options, null),
            HiveTestUtils.createHiveConf(), false);
    Map<String, String> prop = hiveTable.getParameters();
    assertThat(HiveCatalog.isHiveTable(prop)).isTrue();
    assertThat(prop.keySet()).noneMatch(k -> k.startsWith(CatalogPropertiesUtil.FLINK_PROPERTY_PREFIX));
}
Also used: ObjectPath(org.apache.flink.table.catalog.ObjectPath), CatalogTable(org.apache.flink.table.catalog.CatalogTable), SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable), CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable), Table(org.apache.hadoop.hive.metastore.api.Table), CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl), Test(org.junit.Test)
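
The examples on this page use two CatalogTableImpl constructor variants that are easy to mix up. Below is a minimal, self-contained sketch of both; it is not code from the Flink tests above, and the class name, field names, option value, and comments are illustrative only.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.CatalogTableImpl;
import org.apache.flink.table.factories.FactoryUtil;

public class CatalogTableImplSketch {
    public static void main(String[] args) {
        TableSchema schema = TableSchema.builder()
                .field("i", DataTypes.INT())
                .field("p", DataTypes.STRING())
                .build();

        Map<String, String> options = new HashMap<>();
        options.put(FactoryUtil.CONNECTOR.key(), "COLLECTION"); // illustrative connector value

        // Non-partitioned variant: (schema, options, comment), as in Example 11.
        CatalogTable nonPartitioned = new CatalogTableImpl(schema, options, "example comment");

        // Partitioned variant: (schema, partitionKeys, options, comment),
        // as used for the partitioned table in Example 12 below.
        CatalogTable partitioned = new CatalogTableImpl(
                schema, Collections.singletonList("p"), options, "example comment");

        System.out.println(nonPartitioned.getOptions());
        System.out.println(partitioned.getPartitionKeys());
    }
}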

Example 12 with CatalogTableImpl

Use of org.apache.flink.table.catalog.CatalogTableImpl in project flink by apache.

From class HiveSourceITCase, method testRegularRead.

@Test
public void testRegularRead() throws Exception {
    // test non-partitioned table
    ObjectPath tablePath = new ObjectPath("default", "tbl1");
    Map<String, String> tableOptions = new HashMap<>();
    tableOptions.put(CONNECTOR.key(), IDENTIFIER);
    hiveCatalog.createTable(tablePath, new CatalogTableImpl(TableSchema.builder().field("i", DataTypes.INT()).build(), tableOptions, null), false);
    HiveTestUtils.createTextTableInserter(hiveCatalog, tablePath.getDatabaseName(), tablePath.getObjectName()).addRow(new Object[] { 1 }).addRow(new Object[] { 2 }).commit();
    StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    streamEnv.setParallelism(1);
    HiveSource<RowData> hiveSource = new HiveSourceBuilder(
                    new JobConf(hiveCatalog.getHiveConf()), new Configuration(),
                    HiveShimLoader.getHiveVersion(), tablePath.getDatabaseName(),
                    tablePath.getObjectName(), Collections.emptyMap())
            .buildWithDefaultBulkFormat();
    List<RowData> results = CollectionUtil.iteratorToList(streamEnv.fromSource(hiveSource, WatermarkStrategy.noWatermarks(), "HiveSource-tbl1").executeAndCollect());
    assertEquals(2, results.size());
    assertEquals(1, results.get(0).getInt(0));
    assertEquals(2, results.get(1).getInt(0));
    hiveCatalog.dropTable(tablePath, false);
    // test partitioned table
    tablePath = new ObjectPath("default", "tbl2");
    hiveCatalog.createTable(
            tablePath,
            new CatalogTableImpl(
                    TableSchema.builder().field("i", DataTypes.INT()).field("p", DataTypes.STRING()).build(),
                    Collections.singletonList("p"), tableOptions, null),
            false);
    HiveTestUtils.createTextTableInserter(hiveCatalog, tablePath.getDatabaseName(), tablePath.getObjectName()).addRow(new Object[] { 1 }).addRow(new Object[] { 2 }).commit("p='a'");
    hiveSource = new HiveSourceBuilder(
                    new JobConf(hiveCatalog.getHiveConf()), new Configuration(),
                    HiveShimLoader.getHiveVersion(), tablePath.getDatabaseName(),
                    tablePath.getObjectName(), Collections.emptyMap())
            .setLimit(1L)
            .buildWithDefaultBulkFormat();
    results = CollectionUtil.iteratorToList(streamEnv.fromSource(hiveSource, WatermarkStrategy.noWatermarks(), "HiveSource-tbl2").executeAndCollect());
    assertEquals(1, results.size());
    assertEquals(1, results.get(0).getInt(0));
    assertEquals("a", results.get(0).getString(1).toString());
    HiveTestUtils.createTextTableInserter(hiveCatalog, tablePath.getDatabaseName(), tablePath.getObjectName()).addRow(new Object[] { 3 }).commit("p='b'");
    LinkedHashMap<String, String> spec = new LinkedHashMap<>();
    spec.put("p", "b");
    hiveSource = new HiveSourceBuilder(
                    new JobConf(hiveCatalog.getHiveConf()), new Configuration(), null,
                    tablePath.getDatabaseName(), tablePath.getObjectName(), Collections.emptyMap())
            .setPartitions(Collections.singletonList(HiveTablePartition.ofPartition(
                    hiveCatalog.getHiveConf(), hiveCatalog.getHiveVersion(),
                    tablePath.getDatabaseName(), tablePath.getObjectName(), spec)))
            .buildWithDefaultBulkFormat();
    results = CollectionUtil.iteratorToList(streamEnv.fromSource(hiveSource, WatermarkStrategy.noWatermarks(), "HiveSource-tbl2").executeAndCollect());
    assertEquals(1, results.size());
    assertEquals(3, results.get(0).getInt(0));
    assertEquals("b", results.get(0).getString(1).toString());
    hiveCatalog.dropTable(tablePath, false);
}
Also used: ObjectPath(org.apache.flink.table.catalog.ObjectPath), Configuration(org.apache.flink.configuration.Configuration), HashMap(java.util.HashMap), LinkedHashMap(java.util.LinkedHashMap), RowData(org.apache.flink.table.data.RowData), CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl), StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment), JobConf(org.apache.hadoop.mapred.JobConf), Test(org.junit.Test)

Example 13 with CatalogTableImpl

Use of org.apache.flink.table.catalog.CatalogTableImpl in project flink by apache.

From class HiveTableFactoryTest, method testGenericTable.

@Test
public void testGenericTable() throws Exception {
    final TableSchema schema = TableSchema.builder().field("name", DataTypes.STRING()).field("age", DataTypes.INT()).build();
    catalog.createDatabase("mydb", new CatalogDatabaseImpl(new HashMap<>(), ""), true);
    final Map<String, String> options = Collections.singletonMap(FactoryUtil.CONNECTOR.key(), "COLLECTION");
    final CatalogTable table = new CatalogTableImpl(schema, options, "csv table");
    catalog.createTable(new ObjectPath("mydb", "mytable"), table, true);
    final Optional<TableFactory> tableFactoryOpt = catalog.getTableFactory();
    assertTrue(tableFactoryOpt.isPresent());
    final HiveTableFactory tableFactory = (HiveTableFactory) tableFactoryOpt.get();
    final TableSource tableSource = tableFactory.createTableSource(
            new TableSourceFactoryContextImpl(
                    ObjectIdentifier.of("mycatalog", "mydb", "mytable"), table, new Configuration(), false));
    assertTrue(tableSource instanceof StreamTableSource);
    final TableSink tableSink = tableFactory.createTableSink(
            new TableSinkFactoryContextImpl(
                    ObjectIdentifier.of("mycatalog", "mydb", "mytable"), table, new Configuration(), true, false));
    assertTrue(tableSink instanceof StreamTableSink);
}
Also used: ObjectPath(org.apache.flink.table.catalog.ObjectPath), TableSchema(org.apache.flink.table.api.TableSchema), Configuration(org.apache.flink.configuration.Configuration), HashMap(java.util.HashMap), StreamTableSink(org.apache.flink.table.sinks.StreamTableSink), TableSink(org.apache.flink.table.sinks.TableSink), DynamicTableSink(org.apache.flink.table.connector.sink.DynamicTableSink), CatalogTable(org.apache.flink.table.catalog.CatalogTable), ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable), StreamTableSource(org.apache.flink.table.sources.StreamTableSource), CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl), TableSinkFactoryContextImpl(org.apache.flink.table.factories.TableSinkFactoryContextImpl), TableSource(org.apache.flink.table.sources.TableSource), DynamicTableSource(org.apache.flink.table.connector.source.DynamicTableSource), TableFactory(org.apache.flink.table.factories.TableFactory), CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl), TableSourceFactoryContextImpl(org.apache.flink.table.factories.TableSourceFactoryContextImpl), Test(org.junit.Test)

Example 14 with CatalogTableImpl

Use of org.apache.flink.table.catalog.CatalogTableImpl in project flink by apache.

From class HiveTableFactoryTest, method testHiveTable.

@Test
public void testHiveTable() throws Exception {
    final ResolvedSchema schema = ResolvedSchema.of(Column.physical("name", DataTypes.STRING()), Column.physical("age", DataTypes.INT()));
    catalog.createDatabase("mydb", new CatalogDatabaseImpl(new HashMap<>(), ""), true);
    final Map<String, String> options = Collections.singletonMap(FactoryUtil.CONNECTOR.key(), SqlCreateHiveTable.IDENTIFIER);
    final CatalogTable table = new CatalogTableImpl(TableSchema.fromResolvedSchema(schema), options, "hive table");
    catalog.createTable(new ObjectPath("mydb", "mytable"), table, true);
    final DynamicTableSource tableSource = FactoryUtil.createDynamicTableSource(
            (DynamicTableSourceFactory) catalog.getFactory().orElseThrow(IllegalStateException::new),
            ObjectIdentifier.of("mycatalog", "mydb", "mytable"), new ResolvedCatalogTable(table, schema),
            new Configuration(), Thread.currentThread().getContextClassLoader(), false);
    assertTrue(tableSource instanceof HiveTableSource);
    final DynamicTableSink tableSink = FactoryUtil.createDynamicTableSink(
            (DynamicTableSinkFactory) catalog.getFactory().orElseThrow(IllegalStateException::new),
            ObjectIdentifier.of("mycatalog", "mydb", "mytable"), new ResolvedCatalogTable(table, schema),
            new Configuration(), Thread.currentThread().getContextClassLoader(), false);
    assertTrue(tableSink instanceof HiveTableSink);
}
Also used: ObjectPath(org.apache.flink.table.catalog.ObjectPath), Configuration(org.apache.flink.configuration.Configuration), HashMap(java.util.HashMap), DynamicTableSink(org.apache.flink.table.connector.sink.DynamicTableSink), CatalogTable(org.apache.flink.table.catalog.CatalogTable), ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable), CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl), CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl), ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema), DynamicTableSource(org.apache.flink.table.connector.source.DynamicTableSource), Test(org.junit.Test)

Example 15 with CatalogTableImpl

Use of org.apache.flink.table.catalog.CatalogTableImpl in project flink by apache.

From class HiveCatalogGenericMetadataTest, method testGenericTableSchema.

// ------ tables ------
@Test
public void testGenericTableSchema() throws Exception {
    catalog.createDatabase(db1, createDb(), false);
    TableSchema tableSchema = TableSchema.builder()
            .fields(
                    new String[] {"col1", "col2", "col3"},
                    new DataType[] {DataTypes.TIMESTAMP(3), DataTypes.TIMESTAMP(6), DataTypes.TIMESTAMP(9)})
            .watermark("col3", "col3", DataTypes.TIMESTAMP(9))
            .build();
    ObjectPath tablePath = new ObjectPath(db1, "generic_table");
    try {
        catalog.createTable(tablePath, new CatalogTableImpl(tableSchema, getBatchTableProperties(), TEST_COMMENT), false);
        assertEquals(tableSchema, catalog.getTable(tablePath).getSchema());
    } finally {
        catalog.dropTable(tablePath, true);
    }
}
Also used: ObjectPath(org.apache.flink.table.catalog.ObjectPath), TableSchema(org.apache.flink.table.api.TableSchema), CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl), Test(org.junit.Test)
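
Taken together, Examples 11, 13, and 14 hinge on the connector option stored in the CatalogTableImpl: it decides whether HiveCatalog persists the table as a native Hive table or as a generic Flink table. The helper below is a minimal sketch of that distinction, not code from the Flink repository; the class and method names are illustrative, and the comments reflect the assertion in Example 11 plus the usual HiveCatalog handling of generic tables.

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable;
import org.apache.flink.table.factories.FactoryUtil;

public class ConnectorOptionSketch {

    // Marks a table as a native Hive table, as in Examples 11 and 14; Example 11
    // asserts that such tables carry no flink.* prefixed metastore properties.
    static Map<String, String> hiveTableOptions() {
        Map<String, String> options = new HashMap<>();
        options.put(FactoryUtil.CONNECTOR.key(), SqlCreateHiveTable.IDENTIFIER);
        return options;
    }

    // Marks a table as a generic Flink table backed by the COLLECTION connector,
    // as in Example 13; HiveCatalog stores its definition as Flink properties.
    static Map<String, String> genericTableOptions() {
        Map<String, String> options = new HashMap<>();
        options.put(FactoryUtil.CONNECTOR.key(), "COLLECTION");
        return options;
    }
}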

Aggregations

CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 39
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 26
Test (org.junit.Test): 24
TableSchema (org.apache.flink.table.api.TableSchema): 21
HashMap (java.util.HashMap): 20
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 19
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 7
Configuration (org.apache.flink.configuration.Configuration): 6
LinkedHashMap (java.util.LinkedHashMap): 5
ValidationException (org.apache.flink.table.api.ValidationException): 5
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 5
AlterTableSchemaOperation (org.apache.flink.table.operations.ddl.AlterTableSchemaOperation): 5
TableColumn (org.apache.flink.table.api.TableColumn): 4
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 4
Table (org.apache.hadoop.hive.metastore.api.Table): 4
ArrayList (java.util.ArrayList): 3
SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable): 3
ResolvedCatalogTable (org.apache.flink.table.catalog.ResolvedCatalogTable): 3
IOException (java.io.IOException): 2
Path (java.nio.file.Path): 2