
Example 31 with TableResult

Use of org.apache.flink.table.api.TableResult in project flink by apache.

From the class LocalExecutorITCase, method executeAndVerifySinkResult:

private void executeAndVerifySinkResult(Executor executor, String sessionId, String statement, String resultPath) throws Exception {
    final TableResult tableResult = executeSql(executor, sessionId, statement);
    checkState(tableResult.getJobClient().isPresent());
    // wait for job completion
    tableResult.await();
    // verify result
    verifySinkResult(resultPath);
}
Also used : TableResult(org.apache.flink.table.api.TableResult)
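
The verifySinkResult helper is not reproduced in this excerpt. A minimal sketch of what such a check could look like, assuming the statement writes plain text lines into a file at resultPath; the expected line below is a placeholder, not a value from the Flink test:

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;

// Hypothetical helper: read the sink output file and compare it to expected lines.
private static void verifySinkResult(String resultPath) throws IOException {
    final List<String> actual = Files.readAllLines(Paths.get(resultPath));
    Collections.sort(actual);
    // Placeholder expectation; the real test defines its own expected output.
    final List<String> expected = Collections.singletonList("42,Hello World");
    assertEquals(expected, actual);
}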

Example 32 with TableResult

Use of org.apache.flink.table.api.TableResult in project flink by apache.

From the class CliClient, method printRawContent:

public void printRawContent(Operation operation) {
    TableResult tableResult = executor.executeOperation(sessionId, operation);
    // show raw content instead of tableau style
    final String explanation = Objects.requireNonNull(tableResult.collect().next().getField(0)).toString();
    terminal.writer().println(explanation);
    terminal.flush();
}
Also used : TableResult(org.apache.flink.table.api.TableResult)
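
Note that collect() returns a CloseableIterator<Row>; the snippet above reads only the first field of the first row and never closes the iterator. A hedged variant, not from the Flink sources, that releases the underlying resources explicitly via try-with-resources:

import java.util.Objects;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;

// Sketch: read the first field of the first result row, then close the
// iterator to release the resources held by the running job.
static String firstField(TableResult tableResult) throws Exception {
    try (CloseableIterator<Row> it = tableResult.collect()) {
        return Objects.requireNonNull(it.next().getField(0)).toString();
    }
}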

Example 33 with TableResult

Use of org.apache.flink.table.api.TableResult in project flink by apache.

From the class DataStreamJavaITCase, method testFromDataStreamWithRow:

@Test
public void testFromDataStreamWithRow() {
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    final TypeInformation<Row> typeInfo =
            Types.ROW_NAMED(
                    new String[] { "b", "c", "a" },
                    Types.INT,
                    Types.ROW(Types.BOOLEAN, Types.STRING),
                    Types.MAP(Types.STRING, Types.DOUBLE));
    final Row[] rows =
            new Row[] {
                Row.of(12, Row.of(false, "hello"), Collections.singletonMap("world", 2.0)),
                Row.of(null, Row.of(false, null), Collections.singletonMap("world", null))
            };
    final DataStream<Row> dataStream = env.fromCollection(Arrays.asList(rows), typeInfo);
    final TableResult result = tableEnv.fromDataStream(dataStream).execute();
    testSchema(
            result,
            Column.physical("b", INT()),
            Column.physical("c", ROW(FIELD("f0", BOOLEAN()), FIELD("f1", STRING()))),
            Column.physical("a", MAP(STRING(), DOUBLE())));
    testResult(result, rows);
}
Also used : TableResult(org.apache.flink.table.api.TableResult) StreamTableEnvironment(org.apache.flink.table.api.bridge.java.StreamTableEnvironment) Row(org.apache.flink.types.Row) Test(org.junit.Test)
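
The testSchema and testResult helpers belong to DataStreamJavaITCase and are not shown here. A plausible reconstruction of testSchema, assuming it compares the columns of the result's resolved schema against the expected physical columns; the actual helper may differ:

import static org.assertj.core.api.Assertions.assertThat;

import java.util.Arrays;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.catalog.Column;

// Hypothetical helper: assert the resolved schema matches the expected columns.
private static void testSchema(TableResult result, Column... expectedColumns) {
    assertThat(result.getResolvedSchema().getColumns())
            .isEqualTo(Arrays.asList(expectedColumns));
}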

Example 34 with TableResult

Use of org.apache.flink.table.api.TableResult in project flink by apache.

From the class DataStreamJavaITCase, method testFromDataStreamAtomic:

@Test
public void testFromDataStreamAtomic() {
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    final DataStream<Integer> dataStream = env.fromElements(1, 2, 3, 4, 5);
    // wraps the atomic type
    final TableResult result = tableEnv.fromDataStream(dataStream).execute();
    testSchema(result, Column.physical("f0", INT().notNull()));
    testResult(result, Row.of(1), Row.of(2), Row.of(3), Row.of(4), Row.of(5));
}
Also used : TableResult(org.apache.flink.table.api.TableResult) StreamTableEnvironment(org.apache.flink.table.api.bridge.java.StreamTableEnvironment) Test(org.junit.Test)
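
Because the Integer stream is an atomic type, fromDataStream wraps it into a single non-null column with the default name f0. If that default name is undesirable, the column can be renamed on the resulting Table; a small sketch reusing tableEnv and env from the example above, with the column name "num" chosen purely for illustration:

import org.apache.flink.table.api.Table;

// Sketch: rename the implicit "f0" column produced for an atomic type.
final Table renamed = tableEnv.fromDataStream(env.fromElements(1, 2, 3)).as("num");
// The result now has a single INT NOT NULL column named "num" instead of "f0".
renamed.execute().print();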

Example 35 with TableResult

Use of org.apache.flink.table.api.TableResult in project flink by apache.

From the class CommonExecSinkITCase, method testBinaryLengthEnforcer:

@Test
public void testBinaryLengthEnforcer() throws ExecutionException, InterruptedException {
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    final List<Row> rows =
            Arrays.asList(
                    Row.of(1, new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 }, new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 }, 11, 111, new byte[] { 1, 2, 3 }),
                    Row.of(2, new byte[] { 1, 2, 3, 4, 5 }, new byte[] { 1, 2, 3 }, 22, 222, new byte[] { 1, 2, 3, 4, 5, 6 }),
                    Row.of(3, new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 1, 2, 3, 4, 5 }, 33, 333, new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 }),
                    Row.of(4, new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 }, new byte[] { 1, 2, 3, 4, 5, 6 }, 44, 444, new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }));
    final TableDescriptor sourceDescriptor =
            TableFactoryHarness.newBuilder()
                    .schema(schemaForBinaryLengthEnforcer())
                    .source(new TestSource(rows))
                    .build();
    tableEnv.createTable("T1", sourceDescriptor);
    // Default config - ignore (no trim)
    TableResult result = tableEnv.executeSql("SELECT * FROM T1");
    result.await();
    final List<Row> results = new ArrayList<>();
    result.collect().forEachRemaining(results::add);
    assertThat(results).containsExactlyInAnyOrderElementsOf(rows);
    // Change the enforcer from IGNORE to TRIM_PAD, so that the binary values are trimmed or padded
    // accordingly, based on their type length
    try {
        tableEnv.getConfig()
                .set(
                        TABLE_EXEC_SINK_TYPE_LENGTH_ENFORCER.key(),
                        ExecutionConfigOptions.TypeLengthEnforcer.TRIM_PAD.name());
        result = tableEnv.executeSql("SELECT * FROM T1");
        result.await();
        final List<Row> expected =
                Arrays.asList(
                        Row.of(1, new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 }, new byte[] { 1, 2, 3, 4, 5, 6 }, 11, 111, new byte[] { 1, 2, 3 }),
                        Row.of(2, new byte[] { 1, 2, 3, 4, 5, 0, 0, 0 }, new byte[] { 1, 2, 3, 0, 0, 0 }, 22, 222, new byte[] { 1, 2, 3, 4, 5, 6 }),
                        Row.of(3, new byte[] { 1, 2, 3, 4, 5, 6, 0, 0 }, new byte[] { 1, 2, 3, 4, 5, 0 }, 33, 333, new byte[] { 1, 2, 3, 4, 5, 6 }),
                        Row.of(4, new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 }, new byte[] { 1, 2, 3, 4, 5, 6 }, 44, 444, new byte[] { 1, 2, 3, 4, 5, 6 }));
        final List<Row> resultsTrimmed = new ArrayList<>();
        result.collect().forEachRemaining(resultsTrimmed::add);
        assertThat(resultsTrimmed).containsExactlyInAnyOrderElementsOf(expected);
    } finally {
        tableEnv.getConfig().set(TABLE_EXEC_SINK_TYPE_LENGTH_ENFORCER.key(), ExecutionConfigOptions.TypeLengthEnforcer.IGNORE.name());
    }
}
Also used : TableResult(org.apache.flink.table.api.TableResult) ArrayList(java.util.ArrayList) StreamTableEnvironment(org.apache.flink.table.api.bridge.java.StreamTableEnvironment) Row(org.apache.flink.types.Row) TableDescriptor(org.apache.flink.table.api.TableDescriptor) Test(org.junit.Test)
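
The option used above can also be set through its string key. Assuming TABLE_EXEC_SINK_TYPE_LENGTH_ENFORCER.key() resolves to "table.exec.sink.type-length-enforcer" (the constant itself is authoritative), an equivalent configuration sketch:

// Equivalent string-based configuration; the key name is an assumption here,
// ExecutionConfigOptions.TABLE_EXEC_SINK_TYPE_LENGTH_ENFORCER.key() is authoritative.
tableEnv.getConfig().getConfiguration()
        .setString("table.exec.sink.type-length-enforcer", "TRIM_PAD");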

Aggregations

TableResult (org.apache.flink.table.api.TableResult): 39
Test (org.junit.Test): 26
Row (org.apache.flink.types.Row): 20
StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment): 15
ArrayList (java.util.ArrayList): 7
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment): 7
Table (org.apache.flink.table.api.Table): 7
TableEnvironment (org.apache.flink.table.api.TableEnvironment): 7
JobClient (org.apache.flink.core.execution.JobClient): 4
Configuration (org.apache.flink.configuration.Configuration): 3
ParameterTool (org.apache.flink.api.java.utils.ParameterTool): 2
TableDescriptor (org.apache.flink.table.api.TableDescriptor): 2
TableEnvironmentInternal (org.apache.flink.table.api.internal.TableEnvironmentInternal): 2
CsvTableSink (org.apache.flink.table.sinks.CsvTableSink): 2
DataType (org.apache.flink.table.types.DataType): 2
File (java.io.File): 1
IOException (java.io.IOException): 1
BigDecimal (java.math.BigDecimal): 1
Timestamp (java.sql.Timestamp): 1
Random (java.util.Random): 1