Example 16 with TableResult

Use of org.apache.flink.table.api.TableResult in project flink by apache.

The class DependencyTest, method testTableFactoryDiscovery.

@Test
public void testTableFactoryDiscovery() throws Exception {
    final LocalExecutor executor = createLocalExecutor();
    try {
        final TableResult tableResult = executeSql(executor, SESSION_ID, "DESCRIBE TableNumber1");
        assertEquals(
                tableResult.getResolvedSchema(),
                ResolvedSchema.physical(
                        new String[] {"name", "type", "null", "key", "extras", "watermark"},
                        new DataType[] {
                            DataTypes.STRING(),
                            DataTypes.STRING(),
                            DataTypes.BOOLEAN(),
                            DataTypes.STRING(),
                            DataTypes.STRING(),
                            DataTypes.STRING()
                        }));
        List<Row> schemaData =
                Arrays.asList(
                        Row.of("IntegerField1", "INT", true, null, null, null),
                        Row.of("StringField1", "STRING", true, null, null, null),
                        Row.of(
                                "rowtimeField",
                                "TIMESTAMP(3) *ROWTIME*",
                                true,
                                null,
                                null,
                                "`rowtimeField`"));
        assertEquals(schemaData, CollectionUtil.iteratorToList(tableResult.collect()));
    } finally {
        executor.closeSession(SESSION_ID);
    }
}
Also used: TableResult (org.apache.flink.table.api.TableResult), DataType (org.apache.flink.table.types.DataType), Row (org.apache.flink.types.Row), Test (org.junit.Test)
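
The same DESCRIBE pattern works outside the SQL Client test harness shown above. Below is a minimal sketch, where the table name MyTable and the datagen connector are placeholders chosen for illustration, not part of the test:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;

public class DescribeSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());
        // Placeholder table; any registered table works here.
        tEnv.executeSql(
                "CREATE TABLE MyTable (id INT, name STRING) WITH ('connector' = 'datagen')");
        // DESCRIBE produces one row per column: name, type, null, key, extras, watermark.
        TableResult result = tEnv.executeSql("DESCRIBE MyTable");
        result.print();
    }
}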

Example 17 with TableResult

Use of org.apache.flink.table.api.TableResult in project flink by apache.

The class CliClient, method callInserts.

private void callInserts(List<ModifyOperation> operations) {
    printInfo(CliStrings.MESSAGE_SUBMITTING_STATEMENT);
    boolean sync = executor.getSessionConfig(sessionId).get(TABLE_DML_SYNC);
    if (sync) {
        printInfo(MESSAGE_WAIT_EXECUTE);
    }
    TableResult tableResult = executor.executeModifyOperations(sessionId, operations);
    checkState(tableResult.getJobClient().isPresent());
    if (sync) {
        terminal.writer().println(CliStrings.messageInfo(MESSAGE_FINISH_STATEMENT).toAnsi());
    } else {
        terminal.writer().println(CliStrings.messageInfo(MESSAGE_STATEMENT_SUBMITTED).toAnsi());
        terminal.writer().println(String.format("Job ID: %s\n", tableResult.getJobClient().get().getJobID().toString()));
    }
    terminal.flush();
}
Also used: TableResult (org.apache.flink.table.api.TableResult)
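
Outside the CLI, the same JobClient handle is available on any DML TableResult. A minimal sketch, assuming tables named src and snk have already been registered (both names are placeholders):

import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;

public class InsertJobClientSketch {
    // Assumes "src" and "snk" are registered tables in the given environment.
    static void runInsert(TableEnvironment tEnv) throws Exception {
        TableResult result = tEnv.executeSql("INSERT INTO snk SELECT * FROM src");
        // For DML statements, a JobClient is attached once the job is submitted.
        result.getJobClient()
                .ifPresent(client -> System.out.println("Job ID: " + client.getJobID()));
        // Block until the job finishes; the synchronous (table.dml-sync) branch
        // in callInserts above amounts to the same wait.
        result.await();
    }
}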

Example 18 with TableResult

Use of org.apache.flink.table.api.TableResult in project flink by apache.

The class CommonExecSinkITCase, method testCharLengthEnforcer.

@Test
public void testCharLengthEnforcer() throws ExecutionException, InterruptedException {
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    final List<Row> rows =
            Arrays.asList(
                    Row.of(1, "Apache Flink", "SQL RuleZ", 11, 111, "SQL"),
                    Row.of(2, "Apache", "SQL", 22, 222, "Flink"),
                    Row.of(3, "Apache", "Flink", 33, 333, "Apache Flink SQL"),
                    Row.of(4, "Flink Project", "SQL or SeQueL?", 44, 444, "Apache Flink SQL"));
    final TableDescriptor sourceDescriptor =
            TableFactoryHarness.newBuilder()
                    .schema(schemaForCharLengthEnforcer())
                    .source(new TestSource(rows))
                    .build();
    tableEnv.createTable("T1", sourceDescriptor);
    // Default config - ignore (no trim)
    TableResult result = tableEnv.executeSql("SELECT * FROM T1");
    result.await();
    final List<Row> results = new ArrayList<>();
    result.collect().forEachRemaining(results::add);
    assertThat(results).containsExactlyInAnyOrderElementsOf(rows);
    // Change the enforcer to TRIM_PAD, so that CHAR/VARCHAR values are trimmed
    // or padded accordingly, based on their declared type length
    try {
        tableEnv.getConfig()
                .set(
                        TABLE_EXEC_SINK_TYPE_LENGTH_ENFORCER.key(),
                        ExecutionConfigOptions.TypeLengthEnforcer.TRIM_PAD.name());
        result = tableEnv.executeSql("SELECT * FROM T1");
        result.await();
        final List<Row> expected =
                Arrays.asList(
                        Row.of(1, "Apache F", "SQL Ru", 11, 111, "SQL"),
                        Row.of(2, "Apache  ", "SQL   ", 22, 222, "Flink"),
                        Row.of(3, "Apache  ", "Flink ", 33, 333, "Apache"),
                        Row.of(4, "Flink Pr", "SQL or", 44, 444, "Apache"));
        final List<Row> resultsTrimmed = new ArrayList<>();
        result.collect().forEachRemaining(resultsTrimmed::add);
        assertThat(resultsTrimmed).containsExactlyInAnyOrderElementsOf(expected);
    } finally {
        tableEnv.getConfig()
                .set(
                        TABLE_EXEC_SINK_TYPE_LENGTH_ENFORCER.key(),
                        ExecutionConfigOptions.TypeLengthEnforcer.IGNORE.name());
    }
}
Also used: TableResult (org.apache.flink.table.api.TableResult), ArrayList (java.util.ArrayList), StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment), Row (org.apache.flink.types.Row), TableDescriptor (org.apache.flink.table.api.TableDescriptor), Test (org.junit.Test)
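
One detail the test skips for brevity: collect() returns a CloseableIterator, and closing it releases the resources of the underlying job. A small helper sketch (the class and method names are made up for illustration):

import java.util.ArrayList;
import java.util.List;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;

public class CollectSketch {
    // Drains a TableResult into a list and closes the iterator when done.
    static List<Row> collectRows(TableResult result) throws Exception {
        List<Row> rows = new ArrayList<>();
        try (CloseableIterator<Row> it = result.collect()) {
            it.forEachRemaining(rows::add);
        }
        return rows;
    }
}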

Example 19 with TableResult

Use of org.apache.flink.table.api.TableResult in project flink by apache.

The class DataStreamJavaITCase, method testFromAndToDataStreamWithPojo.

@Test
public void testFromAndToDataStreamWithPojo() throws Exception {
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    final ComplexPojo[] pojos = { ComplexPojo.of(42, "hello", new ImmutablePojo(42.0, null)), ComplexPojo.of(42, null, null) };
    final DataStream<ComplexPojo> dataStream = env.fromElements(pojos);
    // reorders columns and enriches the immutable type
    final Table table =
            tableEnv.fromDataStream(
                    dataStream,
                    Schema.newBuilder()
                            .column("c", INT())
                            .column("a", STRING())
                            .column("p", DataTypes.of(ImmutablePojo.class))
                            .build());
    testSchema(
            table,
            Column.physical("c", INT()),
            Column.physical("a", STRING()),
            Column.physical(
                    "p",
                    STRUCTURED(
                            ImmutablePojo.class,
                            FIELD("d", DOUBLE()),
                            FIELD("b", BOOLEAN()))));
    tableEnv.createTemporaryView("t", table);
    final TableResult result = tableEnv.executeSql("SELECT p, p.d, p.b FROM t");
    testResult(result, Row.of(new ImmutablePojo(42.0, null), 42.0, null), Row.of(null, null, null));
    testResult(tableEnv.toDataStream(table, ComplexPojo.class), pojos);
}
Also used: Table (org.apache.flink.table.api.Table), TableResult (org.apache.flink.table.api.TableResult), StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment), Test (org.junit.Test)
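
The round-trip pattern generalizes to any POJO. A minimal, self-contained sketch (the User class is a made-up example, not from the test):

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public class PojoRoundTripSketch {
    // Public fields plus a no-arg constructor let Flink treat this as a POJO
    // and derive a structured data type for it.
    public static class User {
        public int id;
        public String name;
        public User() {}
        public User(int id, String name) { this.id = id; this.name = name; }
    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        DataStream<User> users = env.fromElements(new User(1, "alice"), new User(2, "bob"));
        // DataStream -> Table -> DataStream round trip, as in the test above.
        Table table = tEnv.fromDataStream(users);
        tEnv.toDataStream(table, User.class).print();
        env.execute();
    }
}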

Example 20 with TableResult

Use of org.apache.flink.table.api.TableResult in project flink by apache.

The class DataStreamJavaITCase, method testFromAndToDataStreamEventTime.

@Test
public void testFromAndToDataStreamEventTime() throws Exception {
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    final DataStream<Tuple3<Long, Integer, String>> dataStream = getWatermarkedDataStream();
    final Table table =
            tableEnv.fromDataStream(
                    dataStream,
                    Schema.newBuilder()
                            .columnByMetadata("rowtime", "TIMESTAMP_LTZ(3)")
                            .watermark("rowtime", "SOURCE_WATERMARK()")
                            .build());
    testSchema(
            table,
            new ResolvedSchema(
                    Arrays.asList(
                            Column.physical("f0", BIGINT().notNull()),
                            Column.physical("f1", INT().notNull()),
                            Column.physical("f2", STRING()),
                            Column.metadata("rowtime", TIMESTAMP_LTZ(3), null, false)),
                    Collections.singletonList(
                            WatermarkSpec.of(
                                    "rowtime",
                                    ResolvedExpressionMock.of(
                                            TIMESTAMP_LTZ(3), "`SOURCE_WATERMARK`()"))),
                    null));
    tableEnv.createTemporaryView("t", table);
    final TableResult result =
            tableEnv.executeSql(
                    "SELECT f2, SUM(f1) FROM t GROUP BY f2, TUMBLE(rowtime, INTERVAL '0.005' SECOND)");
    testResult(result, Row.of("a", 47), Row.of("c", 1000), Row.of("c", 1000));
    testResult(
            tableEnv.toDataStream(table)
                    .keyBy(k -> k.getField("f2"))
                    .window(TumblingEventTimeWindows.of(Time.milliseconds(5)))
                    .<Row>apply(
                            (key, window, input, out) -> {
                                int sum = 0;
                                for (Row row : input) {
                                    sum += row.<Integer>getFieldAs("f1");
                                }
                                out.collect(Row.of(key, sum));
                            })
                    .returns(Types.ROW(Types.STRING, Types.INT)),
            Row.of("a", 47),
            Row.of("c", 1000),
            Row.of("c", 1000));
}
Also used: Table (org.apache.flink.table.api.Table), TableResult (org.apache.flink.table.api.TableResult), Tuple3 (org.apache.flink.api.java.tuple.Tuple3), StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment), Row (org.apache.flink.types.Row), ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema), TypeHint (org.apache.flink.api.common.typeinfo.TypeHint), Test (org.junit.Test)
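
The essential ingredient above is the Schema that surfaces the stream's record timestamps and watermarks to SQL. A reduced sketch of just that part, assuming a StreamTableEnvironment tableEnv and a watermarked DataStream stream with fields f0, f1, f2 (the five-millisecond interval from the test is replaced by an illustrative five seconds):

// Imports assumed: org.apache.flink.table.api.Schema, Table, TableResult.
Table table =
        tableEnv.fromDataStream(
                stream,
                Schema.newBuilder()
                        // expose each record's timestamp as a metadata column
                        .columnByMetadata("rowtime", "TIMESTAMP_LTZ(3)")
                        // reuse the watermarks already generated in the DataStream
                        .watermark("rowtime", "SOURCE_WATERMARK()")
                        .build());
tableEnv.createTemporaryView("t", table);
// "rowtime" can now drive event-time operations such as TUMBLE in SQL.
TableResult result =
        tableEnv.executeSql(
                "SELECT f2, SUM(f1) FROM t GROUP BY f2, TUMBLE(rowtime, INTERVAL '5' SECOND)");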

Aggregations

TableResult (org.apache.flink.table.api.TableResult) 39
Test (org.junit.Test) 26
Row (org.apache.flink.types.Row) 20
StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment) 15
ArrayList (java.util.ArrayList) 7
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) 7
Table (org.apache.flink.table.api.Table) 7
TableEnvironment (org.apache.flink.table.api.TableEnvironment) 7
JobClient (org.apache.flink.core.execution.JobClient) 4
Configuration (org.apache.flink.configuration.Configuration) 3
ParameterTool (org.apache.flink.api.java.utils.ParameterTool) 2
TableDescriptor (org.apache.flink.table.api.TableDescriptor) 2
TableEnvironmentInternal (org.apache.flink.table.api.internal.TableEnvironmentInternal) 2
CsvTableSink (org.apache.flink.table.sinks.CsvTableSink) 2
DataType (org.apache.flink.table.types.DataType) 2
File (java.io.File) 1
IOException (java.io.IOException) 1
BigDecimal (java.math.BigDecimal) 1
Timestamp (java.sql.Timestamp) 1
Random (java.util.Random) 1