Example 26 with TableEnvironment

use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

the class HiveCatalogITCase method testTableWithPrimaryKey.

@Test
public void testTableWithPrimaryKey() {
    TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
    tableEnv.getConfig().getConfiguration().setInteger(TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 1);
    tableEnv.registerCatalog("catalog1", hiveCatalog);
    tableEnv.useCatalog("catalog1");
    final String createTable =
            "CREATE TABLE pk_src (\n"
                    + "  uuid varchar(40) not null,\n"
                    + "  price DECIMAL(10, 2),\n"
                    + "  currency STRING,\n"
                    + "  ts6 TIMESTAMP(6),\n"
                    + "  ts AS CAST(ts6 AS TIMESTAMP(3)),\n"
                    + "  WATERMARK FOR ts AS ts,\n"
                    + "  constraint ct1 PRIMARY KEY(uuid) NOT ENFORCED)\n"
                    + "  WITH (\n"
                    + "    'connector.type' = 'filesystem',"
                    + "    'connector.path' = 'file://fakePath',"
                    + "    'format.type' = 'csv')";
    tableEnv.executeSql(createTable);
    TableSchema tableSchema = tableEnv.getCatalog(tableEnv.getCurrentCatalog()).map(catalog -> {
        try {
            final ObjectPath tablePath = ObjectPath.fromString(catalog.getDefaultDatabase() + '.' + "pk_src");
            return catalog.getTable(tablePath).getSchema();
        } catch (TableNotExistException e) {
            return null;
        }
    }).orElse(null);
    assertThat(tableSchema).isNotNull();
    assertThat(tableSchema.getPrimaryKey()).hasValue(UniqueConstraint.primaryKey("ct1", Collections.singletonList("uuid")));
    tableEnv.executeSql("DROP TABLE pk_src");
}
Also used : Arrays(java.util.Arrays) Schema(org.apache.flink.table.api.Schema) FileUtils(org.apache.flink.util.FileUtils) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) CatalogTable(org.apache.flink.table.catalog.CatalogTable) FLINK_PROPERTY_PREFIX(org.apache.flink.table.catalog.CatalogPropertiesUtil.FLINK_PROPERTY_PREFIX) Future(java.util.concurrent.Future) Map(java.util.Map) URI(java.net.URI) Path(java.nio.file.Path) TableEnvironment(org.apache.flink.table.api.TableEnvironment) AfterClass(org.junit.AfterClass) Expressions.$(org.apache.flink.table.api.Expressions.$) TableSchema(org.apache.flink.table.api.TableSchema) Table(org.apache.flink.table.api.Table) TestCollectionTableFactory(org.apache.flink.table.planner.factories.utils.TestCollectionTableFactory) Executors(java.util.concurrent.Executors) List(java.util.List) FactoryUtil(org.apache.flink.table.factories.FactoryUtil) ManagedTableFactory(org.apache.flink.table.factories.ManagedTableFactory) Row(org.apache.flink.types.Row) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) BeforeClass(org.junit.BeforeClass) ByteArrayOutputStream(java.io.ByteArrayOutputStream) TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM(org.apache.flink.table.api.config.ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM) HashMap(java.util.HashMap) Callable(java.util.concurrent.Callable) ObjectPath(org.apache.flink.table.catalog.ObjectPath) AtomicReference(java.util.concurrent.atomic.AtomicReference) ArrayList(java.util.ArrayList) CatalogView(org.apache.flink.table.catalog.CatalogView) Catalog(org.apache.flink.table.catalog.Catalog) TestManagedTableFactory(org.apache.flink.table.factories.TestManagedTableFactory) ExecutorService(java.util.concurrent.ExecutorService) AbstractDataType(org.apache.flink.table.types.AbstractDataType) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) PrintStream(java.io.PrintStream) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) Files(java.nio.file.Files) Configuration(org.apache.flink.configuration.Configuration) DataTypes(org.apache.flink.table.api.DataTypes) Test(org.junit.Test) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) CollectionUtil(org.apache.flink.util.CollectionUtil) File(java.io.File) TimeUnit(java.util.concurrent.TimeUnit) CONNECTOR(org.apache.flink.table.factories.FactoryUtil.CONNECTOR) Rule(org.junit.Rule) CoreOptions(org.apache.flink.configuration.CoreOptions) Paths(java.nio.file.Paths) SqlDialect(org.apache.flink.table.api.SqlDialect) EnvironmentSettings(org.apache.flink.table.api.EnvironmentSettings) BufferedReader(java.io.BufferedReader) FileReader(java.io.FileReader) Comparator(java.util.Comparator) Collections(java.util.Collections) TemporaryFolder(org.junit.rules.TemporaryFolder)
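
For comparison, a minimal sketch of the same primary-key round trip against the default in-memory catalog, with no Hive dependency. The table name pk_demo and the datagen connector are illustrative, and Table#getResolvedSchema assumes Flink 1.13 or later:

TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
// Declare a NOT ENFORCED primary key, as in the test above.
tableEnv.executeSql(
        "CREATE TABLE pk_demo (\n"
                + "  uuid VARCHAR(40) NOT NULL,\n"
                + "  price DECIMAL(10, 2),\n"
                + "  CONSTRAINT ct1 PRIMARY KEY (uuid) NOT ENFORCED)\n"
                + "  WITH ('connector' = 'datagen')");
// Read the constraint back through the resolved schema.
tableEnv.from("pk_demo")
        .getResolvedSchema()
        .getPrimaryKey()
        .ifPresent(pk -> System.out.println(pk.getName() + " -> " + pk.getColumns()));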

Example 27 with TableEnvironment

use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

the class HiveRunnerITCase method testWriteComplexType.

@Test
public void testWriteComplexType() throws Exception {
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvWithHiveCatalog(hiveCatalog);
    Row row = new Row(3);
    Object[] array = new Object[] { 1, 2, 3 };
    Map<Integer, String> map = new HashMap<>();
    map.put(1, "a");
    map.put(2, "b");
    Row struct = new Row(2);
    struct.setField(0, 3);
    struct.setField(1, "c");
    row.setField(0, array);
    row.setField(1, map);
    row.setField(2, struct);
    TestCollectionTableFactory.reset();
    TestCollectionTableFactory.initData(Collections.singletonList(row));
    tableEnv.executeSql("create table default_catalog.default_database.complexSrc (a array<int>,m map<int, string>,s row<f1 int,f2 string>) " + "with ('connector'='COLLECTION','is-bounded' = 'true')");
    tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
    tableEnv.executeSql("create table dest (a array<int>,m map<int, string>,s struct<f1:int,f2:string>)");
    try {
        tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT);
        tableEnv.executeSql("insert into dest select * from default_catalog.default_database.complexSrc").await();
        List<String> result = hiveShell.executeQuery("select * from dest");
        assertEquals(1, result.size());
        assertEquals("[1,2,3]\t{1:\"a\",2:\"b\"}\t{\"f1\":3,\"f2\":\"c\"}", result.get(0));
    } finally {
        tableEnv.executeSql("drop table dest");
    }
}
Also used : HashMap(java.util.HashMap) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Row(org.apache.flink.types.Row) Test(org.junit.Test)
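
As an aside, the nested value above can also be assembled with the Row.of factory instead of individual setField calls; a minimal sketch (the exact toString output varies by Flink version):

Row struct = Row.of(3, "c");
Map<Integer, String> map = new HashMap<>();
map.put(1, "a");
map.put(2, "b");
// Field values are positional: the array, the map, then the nested struct.
Row row = Row.of(new Object[] { 1, 2, 3 }, map, struct);
System.out.println(row);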

Example 28 with TableEnvironment

use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

the class HiveRunnerITCase method testStaticPartition.

@Test
public void testStaticPartition() throws Exception {
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.src (x int)");
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "src").addRow(new Object[] { 1 }).addRow(new Object[] { 2 }).commit();
        tableEnv.executeSql("create table db1.dest (x int) partitioned by (p1 string, p2 double)");
        tableEnv.executeSql("insert into db1.dest partition (p1='1\\'1', p2=1.1) select x from db1.src").await();
        assertEquals(1, hiveCatalog.listPartitions(new ObjectPath("db1", "dest")).size());
        verifyHiveQueryResult("select * from db1.dest", Arrays.asList("1\t1'1\t1.1", "2\t1'1\t1.1"));
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Test(org.junit.Test)
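
Note the contrast with a dynamic insert: the static spec above pins p1 and p2 in the INSERT clause, while a dynamic insert takes the partition values from the query itself. A sketch of both forms (db1.src_with_parts is a hypothetical source that carries the partition columns):

// Static partition: p1 and p2 are fixed in the INSERT clause.
tableEnv.executeSql(
        "insert into db1.dest partition (p1='a', p2=1.1) select x from db1.src").await();
// Dynamic partition: p1 and p2 come from the query itself.
tableEnv.executeSql(
        "insert into db1.dest select x, p1, p2 from db1.src_with_parts").await();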

Example 29 with TableEnvironment

use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

the class HiveRunnerITCase method testInsertIntoNonPartitionTable.

@Test
public void testInsertIntoNonPartitionTable() throws Exception {
    List<Row> toWrite = generateRecords(5);
    TestCollectionTableFactory.reset();
    TestCollectionTableFactory.initData(toWrite);
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvWithHiveCatalog(hiveCatalog);
    tableEnv.executeSql("create table default_catalog.default_database.src (i int,l bigint,d double,s string) " + "with ('connector'='COLLECTION','is-bounded' = 'true')");
    tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
    tableEnv.executeSql("create table dest (i int,l bigint,d double,s string)");
    try {
        tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT);
        tableEnv.executeSql("insert into dest select * from default_catalog.default_database.src").await();
        verifyWrittenData(toWrite, hiveShell.executeQuery("select * from dest"));
    } finally {
        tableEnv.executeSql("drop table dest");
    }
}
Also used : TableEnvironment(org.apache.flink.table.api.TableEnvironment) Row(org.apache.flink.types.Row) Test(org.junit.Test)
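
Outside the Hive test harness, the written rows can also be verified through Flink itself rather than hiveShell; a sketch using TableResult#collect (CloseableIterator is org.apache.flink.util.CloseableIterator, and the dest table from the test is assumed to still exist):

// Collect the result set and close the iterator when done.
try (CloseableIterator<Row> it = tableEnv.executeSql("select * from dest").collect()) {
    while (it.hasNext()) {
        System.out.println(it.next());
    }
}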

Example 30 with TableEnvironment

use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

the class TableEnvHiveConnectorITCase method testRegexSerDe.

@Test
public void testRegexSerDe() throws Exception {
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.src (x int,y string) " + "row format serde 'org.apache.hadoop.hive.serde2.RegexSerDe' " + "with serdeproperties ('input.regex'='([\\\\d]+)\\u0001([\\\\S]+)')");
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "src").addRow(new Object[] { 1, "a" }).addRow(new Object[] { 2, "ab" }).commit();
        assertEquals("[+I[1, a], +I[2, ab]]", CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.src order by x").execute().collect()).toString());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used : TableEnvironment(org.apache.flink.table.api.TableEnvironment) Test(org.junit.Test)
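
The input.regex above splits each line on Hive's default \u0001 (Ctrl-A) field delimiter, with one capture group per column. The same pattern in plain java.util.regex, with an illustrative sample line:

// Group 1: digits (column x); group 2: non-whitespace (column y).
Pattern p = Pattern.compile("([\\d]+)\u0001([\\S]+)");
Matcher m = p.matcher("1\u0001a");
if (m.matches()) {
    // Prints: 1, a
    System.out.println(m.group(1) + ", " + m.group(2));
}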

Aggregations

TableEnvironment (org.apache.flink.table.api.TableEnvironment) 137
Test (org.junit.Test) 95
Row (org.apache.flink.types.Row) 58
StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment) 38
Table (org.apache.flink.table.api.Table) 27
ObjectPath (org.apache.flink.table.catalog.ObjectPath) 19
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) 14
ArrayList (java.util.ArrayList) 13
CatalogTable (org.apache.flink.table.catalog.CatalogTable) 12
HashMap (java.util.HashMap) 11
EnvironmentSettings (org.apache.flink.table.api.EnvironmentSettings) 10
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable) 10
TableResult (org.apache.flink.table.api.TableResult) 8
File (java.io.File) 7
Constructor (java.lang.reflect.Constructor) 7
TableImpl (org.apache.flink.table.api.internal.TableImpl) 7
TableException (org.apache.flink.table.api.TableException) 5
List (java.util.List) 4
Configuration (org.apache.flink.configuration.Configuration) 4
TableSchema (org.apache.flink.table.api.TableSchema) 4