Example 46 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class JavaCatalogTableTest, method testResolvingSchemaOfCustomCatalogTableTableApi:

@Test
public void testResolvingSchemaOfCustomCatalogTableTableApi() throws Exception {
    TableTestUtil testUtil = getTestUtil();
    TableEnvironment tableEnvironment = testUtil.getTableEnv();
    GenericInMemoryCatalog genericInMemoryCatalog = new GenericInMemoryCatalog("in-memory");
    genericInMemoryCatalog.createTable(new ObjectPath("default", "testTable"), new CustomCatalogTable(isStreamingMode), false);
    tableEnvironment.registerCatalog("testCatalog", genericInMemoryCatalog);
    Table table =
            tableEnvironment
                    .from("testCatalog.`default`.testTable")
                    .window(Tumble.over(lit(10).minute()).on($("rowtime")).as("w"))
                    .groupBy($("w"))
                    .select(lit(1).count());
    testUtil.verifyExecPlan(table);
}
Also used: ObjectPath (org.apache.flink.table.catalog.ObjectPath), CatalogTable (org.apache.flink.table.catalog.CatalogTable), Table (org.apache.flink.table.api.Table), CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), TableTestUtil (org.apache.flink.table.planner.utils.TableTestUtil), TableEnvironment (org.apache.flink.table.api.TableEnvironment), GenericInMemoryCatalog (org.apache.flink.table.catalog.GenericInMemoryCatalog), Test (org.junit.Test)
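
For comparison, the same windowed aggregation can be expressed in SQL with a TUMBLE group window. A minimal sketch, assuming the same catalog registration as in the test above:

    // Sketch only: reuses testUtil and the "testCatalog" registration from the example above.
    testUtil.verifyExecPlan(
            "SELECT COUNT(*) FROM testCatalog.`default`.testTable "
                    + "GROUP BY TUMBLE(rowtime, INTERVAL '10' MINUTE)");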

Example 47 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class JavaCatalogTableTest, method testResolvingProctimeOfCustomTableTableApi:

@Test
public void testResolvingProctimeOfCustomTableTableApi() throws Exception {
    if (!isStreamingMode) {
        // proctime not supported in batch
        return;
    }
    TableTestUtil testUtil = getTestUtil();
    TableEnvironment tableEnvironment = testUtil.getTableEnv();
    GenericInMemoryCatalog genericInMemoryCatalog = new GenericInMemoryCatalog("in-memory");
    genericInMemoryCatalog.createTable(new ObjectPath("default", "testTable"), new CustomCatalogTable(isStreamingMode), false);
    tableEnvironment.registerCatalog("testCatalog", genericInMemoryCatalog);
    Table table =
            tableEnvironment
                    .from("testCatalog.`default`.testTable")
                    .window(Tumble.over(lit(10).minute()).on($("proctime")).as("w"))
                    .groupBy($("w"))
                    .select(lit(1).count());
    testUtil.verifyExecPlan(table);
}
Also used: ObjectPath (org.apache.flink.table.catalog.ObjectPath), CatalogTable (org.apache.flink.table.catalog.CatalogTable), Table (org.apache.flink.table.api.Table), CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), TableTestUtil (org.apache.flink.table.planner.utils.TableTestUtil), TableEnvironment (org.apache.flink.table.api.TableEnvironment), GenericInMemoryCatalog (org.apache.flink.table.catalog.GenericInMemoryCatalog), Test (org.junit.Test)

Example 48 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class JavaCatalogTableTest, method testResolvingProctimeOfCustomTableSql:

@Test
public void testResolvingProctimeOfCustomTableSql() throws Exception {
    if (!isStreamingMode) {
        // proctime not supported in batch
        return;
    }
    TableTestUtil testUtil = getTestUtil();
    TableEnvironment tableEnvironment = testUtil.getTableEnv();
    GenericInMemoryCatalog genericInMemoryCatalog = new GenericInMemoryCatalog("in-memory");
    genericInMemoryCatalog.createTable(new ObjectPath("default", "testTable"), new CustomCatalogTable(isStreamingMode), false);
    tableEnvironment.registerCatalog("testCatalog", genericInMemoryCatalog);
    testUtil.verifyExecPlan("SELECT COUNT(*) FROM testCatalog.`default`.testTable " + "GROUP BY TUMBLE(proctime, INTERVAL '10' MINUTE)");
}
Also used: ObjectPath (org.apache.flink.table.catalog.ObjectPath), TableTestUtil (org.apache.flink.table.planner.utils.TableTestUtil), TableEnvironment (org.apache.flink.table.api.TableEnvironment), GenericInMemoryCatalog (org.apache.flink.table.catalog.GenericInMemoryCatalog), Test (org.junit.Test)
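
The CustomCatalogTable used by these tests is project-specific and not shown on this page. As a hedged sketch of how a catalog table can expose a processing-time attribute, a plain CatalogTable whose schema declares a computed proctime column could be registered like this (the column names and the empty options map are illustrative assumptions, not the test's implementation):

    // Hedged sketch, not the test's CustomCatalogTable.
    // Requires org.apache.flink.table.api.Schema, org.apache.flink.table.api.DataTypes,
    // org.apache.flink.table.catalog.CatalogTable and java.util.Collections.
    Schema schema = Schema.newBuilder()
            .column("amount", DataTypes.BIGINT())
            // computed processing-time column
            .columnByExpression("proctime", "PROCTIME()")
            .build();
    CatalogTable catalogTable = CatalogTable.of(
            schema,
            "illustrative table",      // comment
            Collections.emptyList(),   // no partition keys
            Collections.emptyMap());   // connector options omitted in this sketch

    GenericInMemoryCatalog catalog = new GenericInMemoryCatalog("in-memory");
    catalog.createTable(new ObjectPath("default", "testTable"), catalogTable, false);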

Example 49 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class BuiltInFunctionTestBase, method testFunction:

@Test
public void testFunction() {
    final TableEnvironment env = TableEnvironment.create(EnvironmentSettings.newInstance().build());
    env.getConfig().addConfiguration(configuration());
    testSpec.functions.forEach(f -> env.createTemporarySystemFunction(f.getSimpleName(), f));
    final DataTypeFactory dataTypeFactory = ((TableEnvironmentInternal) env).getCatalogManager().getDataTypeFactory();
    final Table inputTable;
    if (testSpec.fieldDataTypes == null) {
        inputTable = env.fromValues(Row.of(testSpec.fieldData));
    } else {
        final DataTypes.UnresolvedField[] fields =
                IntStream.range(0, testSpec.fieldDataTypes.length)
                        .mapToObj(i -> DataTypes.FIELD("f" + i, testSpec.fieldDataTypes[i]))
                        .toArray(DataTypes.UnresolvedField[]::new);
        inputTable = env.fromValues(DataTypes.ROW(fields), Row.of(testSpec.fieldData));
    }
    for (TestItem testItem : testSpec.testItems) {
        try {
            if (testItem instanceof ResultTestItem<?>) {
                testResult(dataTypeFactory, env, inputTable, (ResultTestItem<?>) testItem);
            } else if (testItem instanceof ErrorTestItem<?>) {
                testError(env, inputTable, (ErrorTestItem<?>) testItem);
            }
        } catch (Throwable t) {
            throw new AssertionError("Failing test item: " + testItem, t);
        }
    }
}
Also used: DataTypeFactory (org.apache.flink.table.catalog.DataTypeFactory), IntStream (java.util.stream.IntStream), DataType (org.apache.flink.table.types.DataType), BuiltInFunctionDefinition (org.apache.flink.table.functions.BuiltInFunctionDefinition), Assertions.assertThat (org.assertj.core.api.Assertions.assertThat), RunWith (org.junit.runner.RunWith), Expression (org.apache.flink.table.expressions.Expression), AtomicReference (java.util.concurrent.atomic.AtomicReference), MiniClusterResourceConfiguration (org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration), ArrayList (java.util.ArrayList), Collections.singletonList (java.util.Collections.singletonList), Assertions.assertThatThrownBy (org.assertj.core.api.Assertions.assertThatThrownBy), Assertions.catchThrowable (org.assertj.core.api.Assertions.catchThrowable), ClassRule (org.junit.ClassRule), FlinkAssertions.anyCauseMatches (org.apache.flink.core.testutils.FlinkAssertions.anyCauseMatches), Parameterized (org.junit.runners.Parameterized), Nullable (javax.annotation.Nullable), AbstractDataType (org.apache.flink.table.types.AbstractDataType), MiniClusterWithClientResource (org.apache.flink.test.util.MiniClusterWithClientResource), TableEnvironment (org.apache.flink.table.api.TableEnvironment), Iterator (java.util.Iterator), Parameter (org.junit.runners.Parameterized.Parameter), Configuration (org.apache.flink.configuration.Configuration), DataTypes (org.apache.flink.table.api.DataTypes), UserDefinedFunction (org.apache.flink.table.functions.UserDefinedFunction), Test (org.junit.Test), Table (org.apache.flink.table.api.Table), Preconditions (org.apache.flink.util.Preconditions), Collectors (java.util.stream.Collectors), Consumer (java.util.function.Consumer), List (java.util.List), ValidationException (org.apache.flink.table.api.ValidationException), EnvironmentSettings (org.apache.flink.table.api.EnvironmentSettings), TableResult (org.apache.flink.table.api.TableResult), Row (org.apache.flink.types.Row), TableEnvironmentInternal (org.apache.flink.table.api.internal.TableEnvironmentInternal)
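
The test base above wires functions and input rows from a parameterized TestSpec. As a self-contained sketch of the same registration-and-call pattern (the MyUpper function and the field name f0 are made up for illustration, not part of the test base):

import static org.apache.flink.table.api.Expressions.$;
import static org.apache.flink.table.api.Expressions.call;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.functions.ScalarFunction;
import org.apache.flink.types.Row;

public class FunctionCallSketch {

    // Hypothetical scalar function used only for this sketch.
    public static class MyUpper extends ScalarFunction {
        public String eval(String s) {
            return s == null ? null : s.toUpperCase();
        }
    }

    public static void main(String[] args) {
        TableEnvironment env =
                TableEnvironment.create(EnvironmentSettings.newInstance().build());

        // Register the function under a temporary system name, as testFunction() does.
        env.createTemporarySystemFunction("MyUpper", MyUpper.class);

        // One-row input table with an explicit row type, mirroring the
        // fieldDataTypes branch of testFunction().
        Table input = env.fromValues(
                DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.STRING())),
                Row.of("hello"));

        // Call the registered function and print the result rows.
        input.select(call("MyUpper", $("f0")))
                .execute()
                .collect()
                .forEachRemaining(System.out::println);
    }
}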

Example 50 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class CatalogITCase, method testCreateCatalog:

@Test
public void testCreateCatalog() {
    String name = "c1";
    TableEnvironment tableEnv = getTableEnvironment();
    String ddl = String.format("create catalog %s with('type'='%s')", name, GenericInMemoryCatalogFactoryOptions.IDENTIFIER);
    tableEnv.executeSql(ddl);
    assertTrue(tableEnv.getCatalog(name).isPresent());
    assertTrue(tableEnv.getCatalog(name).get() instanceof GenericInMemoryCatalog);
}
Also used: TableEnvironment (org.apache.flink.table.api.TableEnvironment), StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment), GenericInMemoryCatalog (org.apache.flink.table.catalog.GenericInMemoryCatalog), Test (org.junit.Test)
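
As a possible follow-up (not part of the test), the newly created catalog can be made the current catalog and inspected. A sketch reusing the same tableEnv, assuming java.util.Arrays and the usual JUnit assertions are imported:

    // Sketch only: switch to the catalog created above and confirm it is visible.
    tableEnv.executeSql("USE CATALOG c1");
    assertEquals("c1", tableEnv.getCurrentCatalog());
    assertTrue(Arrays.asList(tableEnv.listCatalogs()).contains("c1"));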

Aggregations

TableEnvironment (org.apache.flink.table.api.TableEnvironment): 137
Test (org.junit.Test): 95
Row (org.apache.flink.types.Row): 58
StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment): 38
Table (org.apache.flink.table.api.Table): 27
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 19
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment): 14
ArrayList (java.util.ArrayList): 13
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 12
HashMap (java.util.HashMap): 11
EnvironmentSettings (org.apache.flink.table.api.EnvironmentSettings): 10
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 10
TableResult (org.apache.flink.table.api.TableResult): 8
File (java.io.File): 7
Constructor (java.lang.reflect.Constructor): 7
TableImpl (org.apache.flink.table.api.internal.TableImpl): 7
TableException (org.apache.flink.table.api.TableException): 5
List (java.util.List): 4
Configuration (org.apache.flink.configuration.Configuration): 4
TableSchema (org.apache.flink.table.api.TableSchema): 4